##// END OF EJS Templates
delta-chain: move the debugdeltachain command in revlogutils...
marmoute -
r51963:d7f975e4 default
parent child Browse files
Show More
@@ -1,4836 +1,4657 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 open,
37 37 )
38 38 from . import (
39 39 bundle2,
40 40 bundlerepo,
41 41 changegroup,
42 42 cmdutil,
43 43 color,
44 44 context,
45 45 copies,
46 46 dagparser,
47 47 dirstateutils,
48 48 encoding,
49 49 error,
50 50 exchange,
51 51 extensions,
52 52 filelog,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 manifest,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 verify,
92 92 vfs as vfsmod,
93 93 wireprotoframing,
94 94 wireprotoserver,
95 95 )
96 96 from .interfaces import repository
97 97 from .stabletailgraph import stabletailsort
98 98 from .utils import (
99 99 cborutil,
100 100 compression,
101 101 dateutil,
102 102 procutil,
103 103 stringutil,
104 104 urlutil,
105 105 )
106 106
107 107 from .revlogutils import (
108 constants as revlog_constants,
109 108 debug as revlog_debug,
110 deltas as deltautil,
111 109 nodemap,
112 110 rewrite,
113 111 sidedata,
114 112 )
115 113
116 114 release = lockmod.release
117 115
118 116 table = {}
119 117 table.update(strip.command._table)
120 118 command = registrar.command(table)
121 119
122 120
123 121 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
124 122 def debugancestor(ui, repo, *args):
125 123 """find the ancestor revision of two revisions in a given index"""
126 124 if len(args) == 3:
127 125 index, rev1, rev2 = args
128 126 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
129 127 lookup = r.lookup
130 128 elif len(args) == 2:
131 129 if not repo:
132 130 raise error.Abort(
133 131 _(b'there is no Mercurial repository here (.hg not found)')
134 132 )
135 133 rev1, rev2 = args
136 134 r = repo.changelog
137 135 lookup = repo.lookup
138 136 else:
139 137 raise error.Abort(_(b'either two or three arguments required'))
140 138 a = r.ancestor(lookup(rev1), lookup(rev2))
141 139 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
142 140
143 141
144 142 @command(b'debugantivirusrunning', [])
145 143 def debugantivirusrunning(ui, repo):
146 144 """attempt to trigger an antivirus scanner to see if one is active"""
147 145 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
148 146 f.write(
149 147 util.b85decode(
150 148 # This is a base85-armored version of the EICAR test file. See
151 149 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
152 150 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
153 151 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
154 152 )
155 153 )
156 154 # Give an AV engine time to scan the file.
157 155 time.sleep(2)
158 156 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
159 157
160 158
161 159 @command(b'debugapplystreamclonebundle', [], b'FILE')
162 160 def debugapplystreamclonebundle(ui, repo, fname):
163 161 """apply a stream clone bundle file"""
164 162 f = hg.openpath(ui, fname)
165 163 gen = exchange.readbundle(ui, f, fname)
166 164 gen.apply(repo)
167 165
168 166
169 167 @command(
170 168 b'debugbuilddag',
171 169 [
172 170 (
173 171 b'm',
174 172 b'mergeable-file',
175 173 None,
176 174 _(b'add single file mergeable changes'),
177 175 ),
178 176 (
179 177 b'o',
180 178 b'overwritten-file',
181 179 None,
182 180 _(b'add single file all revs overwrite'),
183 181 ),
184 182 (b'n', b'new-file', None, _(b'add new file at each rev')),
185 183 (
186 184 b'',
187 185 b'from-existing',
188 186 None,
189 187 _(b'continue from a non-empty repository'),
190 188 ),
191 189 ],
192 190 _(b'[OPTION]... [TEXT]'),
193 191 )
194 192 def debugbuilddag(
195 193 ui,
196 194 repo,
197 195 text=None,
198 196 mergeable_file=False,
199 197 overwritten_file=False,
200 198 new_file=False,
201 199 from_existing=False,
202 200 ):
203 201 """builds a repo with a given DAG from scratch in the current empty repo
204 202
205 203 The description of the DAG is read from stdin if not given on the
206 204 command line.
207 205
208 206 Elements:
209 207
210 208 - "+n" is a linear run of n nodes based on the current default parent
211 209 - "." is a single node based on the current default parent
212 210 - "$" resets the default parent to null (implied at the start);
213 211 otherwise the default parent is always the last node created
214 212 - "<p" sets the default parent to the backref p
215 213 - "*p" is a fork at parent p, which is a backref
216 214 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
217 215 - "/p2" is a merge of the preceding node and p2
218 216 - ":tag" defines a local tag for the preceding node
219 217 - "@branch" sets the named branch for subsequent nodes
220 218 - "#...\\n" is a comment up to the end of the line
221 219
222 220 Whitespace between the above elements is ignored.
223 221
224 222 A backref is either
225 223
226 224 - a number n, which references the node curr-n, where curr is the current
227 225 node, or
228 226 - the name of a local tag you placed earlier using ":tag", or
229 227 - empty to denote the default parent.
230 228
231 229 All string-valued elements are either strictly alphanumeric, or must
232 230 be enclosed in double quotes ("..."), with "\\" as escape character.
233 231 """
234 232
235 233 if text is None:
236 234 ui.status(_(b"reading DAG from stdin\n"))
237 235 text = ui.fin.read()
238 236
239 237 cl = repo.changelog
240 238 if len(cl) > 0 and not from_existing:
241 239 raise error.Abort(_(b'repository is not empty'))
242 240
243 241 # determine number of revs in DAG
244 242 total = 0
245 243 for type, data in dagparser.parsedag(text):
246 244 if type == b'n':
247 245 total += 1
248 246
249 247 if mergeable_file:
250 248 linesperrev = 2
251 249 # make a file with k lines per rev
252 250 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
253 251 initialmergedlines.append(b"")
254 252
255 253 tags = []
256 254 progress = ui.makeprogress(
257 255 _(b'building'), unit=_(b'revisions'), total=total
258 256 )
259 257 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
260 258 at = -1
261 259 atbranch = b'default'
262 260 nodeids = []
263 261 id = 0
264 262 progress.update(id)
265 263 for type, data in dagparser.parsedag(text):
266 264 if type == b'n':
267 265 ui.note((b'node %s\n' % pycompat.bytestr(data)))
268 266 id, ps = data
269 267
270 268 files = []
271 269 filecontent = {}
272 270
273 271 p2 = None
274 272 if mergeable_file:
275 273 fn = b"mf"
276 274 p1 = repo[ps[0]]
277 275 if len(ps) > 1:
278 276 p2 = repo[ps[1]]
279 277 pa = p1.ancestor(p2)
280 278 base, local, other = [
281 279 x[fn].data() for x in (pa, p1, p2)
282 280 ]
283 281 m3 = simplemerge.Merge3Text(base, local, other)
284 282 ml = [
285 283 l.strip()
286 284 for l in simplemerge.render_minimized(m3)[0]
287 285 ]
288 286 ml.append(b"")
289 287 elif at > 0:
290 288 ml = p1[fn].data().split(b"\n")
291 289 else:
292 290 ml = initialmergedlines
293 291 ml[id * linesperrev] += b" r%i" % id
294 292 mergedtext = b"\n".join(ml)
295 293 files.append(fn)
296 294 filecontent[fn] = mergedtext
297 295
298 296 if overwritten_file:
299 297 fn = b"of"
300 298 files.append(fn)
301 299 filecontent[fn] = b"r%i\n" % id
302 300
303 301 if new_file:
304 302 fn = b"nf%i" % id
305 303 files.append(fn)
306 304 filecontent[fn] = b"r%i\n" % id
307 305 if len(ps) > 1:
308 306 if not p2:
309 307 p2 = repo[ps[1]]
310 308 for fn in p2:
311 309 if fn.startswith(b"nf"):
312 310 files.append(fn)
313 311 filecontent[fn] = p2[fn].data()
314 312
315 313 def fctxfn(repo, cx, path):
316 314 if path in filecontent:
317 315 return context.memfilectx(
318 316 repo, cx, path, filecontent[path]
319 317 )
320 318 return None
321 319
322 320 if len(ps) == 0 or ps[0] < 0:
323 321 pars = [None, None]
324 322 elif len(ps) == 1:
325 323 pars = [nodeids[ps[0]], None]
326 324 else:
327 325 pars = [nodeids[p] for p in ps]
328 326 cx = context.memctx(
329 327 repo,
330 328 pars,
331 329 b"r%i" % id,
332 330 files,
333 331 fctxfn,
334 332 date=(id, 0),
335 333 user=b"debugbuilddag",
336 334 extra={b'branch': atbranch},
337 335 )
338 336 nodeid = repo.commitctx(cx)
339 337 nodeids.append(nodeid)
340 338 at = id
341 339 elif type == b'l':
342 340 id, name = data
343 341 ui.note((b'tag %s\n' % name))
344 342 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
345 343 elif type == b'a':
346 344 ui.note((b'branch %s\n' % data))
347 345 atbranch = data
348 346 progress.update(id)
349 347
350 348 if tags:
351 349 repo.vfs.write(b"localtags", b"".join(tags))
352 350
353 351
354 352 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
355 353 indent_string = b' ' * indent
356 354 if all:
357 355 ui.writenoi18n(
358 356 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
359 357 % indent_string
360 358 )
361 359
362 360 def showchunks(named):
363 361 ui.write(b"\n%s%s\n" % (indent_string, named))
364 362 for deltadata in gen.deltaiter():
365 363 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
366 364 ui.write(
367 365 b"%s%s %s %s %s %s %d\n"
368 366 % (
369 367 indent_string,
370 368 hex(node),
371 369 hex(p1),
372 370 hex(p2),
373 371 hex(cs),
374 372 hex(deltabase),
375 373 len(delta),
376 374 )
377 375 )
378 376
379 377 gen.changelogheader()
380 378 showchunks(b"changelog")
381 379 gen.manifestheader()
382 380 showchunks(b"manifest")
383 381 for chunkdata in iter(gen.filelogheader, {}):
384 382 fname = chunkdata[b'filename']
385 383 showchunks(fname)
386 384 else:
387 385 if isinstance(gen, bundle2.unbundle20):
388 386 raise error.Abort(_(b'use debugbundle2 for this file'))
389 387 gen.changelogheader()
390 388 for deltadata in gen.deltaiter():
391 389 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
392 390 ui.write(b"%s%s\n" % (indent_string, hex(node)))
393 391
394 392
395 393 def _debugobsmarkers(ui, part, indent=0, **opts):
396 394 """display version and markers contained in 'data'"""
397 395 data = part.read()
398 396 indent_string = b' ' * indent
399 397 try:
400 398 version, markers = obsolete._readmarkers(data)
401 399 except error.UnknownVersion as exc:
402 400 msg = b"%sunsupported version: %s (%d bytes)\n"
403 401 msg %= indent_string, exc.version, len(data)
404 402 ui.write(msg)
405 403 else:
406 404 msg = b"%sversion: %d (%d bytes)\n"
407 405 msg %= indent_string, version, len(data)
408 406 ui.write(msg)
409 407 fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
410 408 for rawmarker in sorted(markers):
411 409 m = obsutil.marker(None, rawmarker)
412 410 fm.startitem()
413 411 fm.plain(indent_string)
414 412 cmdutil.showmarker(fm, m)
415 413 fm.end()
416 414
417 415
418 416 def _debugphaseheads(ui, data, indent=0):
419 417 """display version and markers contained in 'data'"""
420 418 indent_string = b' ' * indent
421 419 headsbyphase = phases.binarydecode(data)
422 420 for phase in phases.allphases:
423 421 for head in headsbyphase[phase]:
424 422 ui.write(indent_string)
425 423 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426 424
427 425
428 426 def _quasirepr(thing):
429 427 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
430 428 return b'{%s}' % (
431 429 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
432 430 )
433 431 return pycompat.bytestr(repr(thing))
434 432
435 433
436 434 def _debugbundle2(ui, gen, all=None, **opts):
437 435 """lists the contents of a bundle2"""
438 436 if not isinstance(gen, bundle2.unbundle20):
439 437 raise error.Abort(_(b'not a bundle2 file'))
440 438 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
441 439 parttypes = opts.get('part_type', [])
442 440 for part in gen.iterparts():
443 441 if parttypes and part.type not in parttypes:
444 442 continue
445 443 msg = b'%s -- %s (mandatory: %r)\n'
446 444 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
447 445 if part.type == b'changegroup':
448 446 version = part.params.get(b'version', b'01')
449 447 cg = changegroup.getunbundler(version, part, b'UN')
450 448 if not ui.quiet:
451 449 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
452 450 if part.type == b'obsmarkers':
453 451 if not ui.quiet:
454 452 _debugobsmarkers(ui, part, indent=4, **opts)
455 453 if part.type == b'phase-heads':
456 454 if not ui.quiet:
457 455 _debugphaseheads(ui, part, indent=4)
458 456
459 457
460 458 @command(
461 459 b'debugbundle',
462 460 [
463 461 (b'a', b'all', None, _(b'show all details')),
464 462 (b'', b'part-type', [], _(b'show only the named part type')),
465 463 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
466 464 ],
467 465 _(b'FILE'),
468 466 norepo=True,
469 467 )
470 468 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
471 469 """lists the contents of a bundle"""
472 470 with hg.openpath(ui, bundlepath) as f:
473 471 if spec:
474 472 spec = exchange.getbundlespec(ui, f)
475 473 ui.write(b'%s\n' % spec)
476 474 return
477 475
478 476 gen = exchange.readbundle(ui, f, bundlepath)
479 477 if isinstance(gen, bundle2.unbundle20):
480 478 return _debugbundle2(ui, gen, all=all, **opts)
481 479 _debugchangegroup(ui, gen, all=all, **opts)
482 480
483 481
484 482 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
485 483 def debugcapabilities(ui, path, **opts):
486 484 """lists the capabilities of a remote peer"""
487 485 peer = hg.peer(ui, pycompat.byteskwargs(opts), path)
488 486 try:
489 487 caps = peer.capabilities()
490 488 ui.writenoi18n(b'Main capabilities:\n')
491 489 for c in sorted(caps):
492 490 ui.write(b' %s\n' % c)
493 491 b2caps = bundle2.bundle2caps(peer)
494 492 if b2caps:
495 493 ui.writenoi18n(b'Bundle2 capabilities:\n')
496 494 for key, values in sorted(b2caps.items()):
497 495 ui.write(b' %s\n' % key)
498 496 for v in values:
499 497 ui.write(b' %s\n' % v)
500 498 finally:
501 499 peer.close()
502 500
503 501
504 502 @command(
505 503 b'debugchangedfiles',
506 504 [
507 505 (
508 506 b'',
509 507 b'compute',
510 508 False,
511 509 b"compute information instead of reading it from storage",
512 510 ),
513 511 ],
514 512 b'REV',
515 513 )
516 514 def debugchangedfiles(ui, repo, rev, **opts):
517 515 """list the stored files changes for a revision"""
518 516 ctx = logcmdutil.revsingle(repo, rev, None)
519 517 files = None
520 518
521 519 if opts['compute']:
522 520 files = metadata.compute_all_files_changes(ctx)
523 521 else:
524 522 sd = repo.changelog.sidedata(ctx.rev())
525 523 files_block = sd.get(sidedata.SD_FILES)
526 524 if files_block is not None:
527 525 files = metadata.decode_files_sidedata(sd)
528 526 if files is not None:
529 527 for f in sorted(files.touched):
530 528 if f in files.added:
531 529 action = b"added"
532 530 elif f in files.removed:
533 531 action = b"removed"
534 532 elif f in files.merged:
535 533 action = b"merged"
536 534 elif f in files.salvaged:
537 535 action = b"salvaged"
538 536 else:
539 537 action = b"touched"
540 538
541 539 copy_parent = b""
542 540 copy_source = b""
543 541 if f in files.copied_from_p1:
544 542 copy_parent = b"p1"
545 543 copy_source = files.copied_from_p1[f]
546 544 elif f in files.copied_from_p2:
547 545 copy_parent = b"p2"
548 546 copy_source = files.copied_from_p2[f]
549 547
550 548 data = (action, copy_parent, f, copy_source)
551 549 template = b"%-8s %2s: %s, %s;\n"
552 550 ui.write(template % data)
553 551
554 552
555 553 @command(b'debugcheckstate', [], b'')
556 554 def debugcheckstate(ui, repo):
557 555 """validate the correctness of the current dirstate"""
558 556 errors = verify.verifier(repo)._verify_dirstate()
559 557 if errors:
560 558 errstr = _(b"dirstate inconsistent with current parent's manifest")
561 559 raise error.Abort(errstr)
562 560
563 561
564 562 @command(
565 563 b'debugcolor',
566 564 [(b'', b'style', None, _(b'show all configured styles'))],
567 565 b'hg debugcolor',
568 566 )
569 567 def debugcolor(ui, repo, **opts):
570 568 """show available color, effects or style"""
571 569 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
572 570 if opts.get('style'):
573 571 return _debugdisplaystyle(ui)
574 572 else:
575 573 return _debugdisplaycolor(ui)
576 574
577 575
578 576 def _debugdisplaycolor(ui):
579 577 ui = ui.copy()
580 578 ui._styles.clear()
581 579 for effect in color._activeeffects(ui).keys():
582 580 ui._styles[effect] = effect
583 581 if ui._terminfoparams:
584 582 for k, v in ui.configitems(b'color'):
585 583 if k.startswith(b'color.'):
586 584 ui._styles[k] = k[6:]
587 585 elif k.startswith(b'terminfo.'):
588 586 ui._styles[k] = k[9:]
589 587 ui.write(_(b'available colors:\n'))
590 588 # sort label with a '_' after the other to group '_background' entry.
591 589 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
592 590 for colorname, label in items:
593 591 ui.write(b'%s\n' % colorname, label=label)
594 592
595 593
596 594 def _debugdisplaystyle(ui):
597 595 ui.write(_(b'available style:\n'))
598 596 if not ui._styles:
599 597 return
600 598 width = max(len(s) for s in ui._styles)
601 599 for label, effects in sorted(ui._styles.items()):
602 600 ui.write(b'%s' % label, label=label)
603 601 if effects:
604 602 # 50
605 603 ui.write(b': ')
606 604 ui.write(b' ' * (max(0, width - len(label))))
607 605 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
608 606 ui.write(b'\n')
609 607
610 608
611 609 @command(b'debugcreatestreamclonebundle', [], b'FILE')
612 610 def debugcreatestreamclonebundle(ui, repo, fname):
613 611 """create a stream clone bundle file
614 612
615 613 Stream bundles are special bundles that are essentially archives of
616 614 revlog files. They are commonly used for cloning very quickly.
617 615
618 616 This command creates a "version 1" stream clone, which is deprecated in
619 617 favor of newer versions of the stream protocol. Bundles using such newer
620 618 versions can be generated using the `hg bundle` command.
621 619 """
622 620 # TODO we may want to turn this into an abort when this functionality
623 621 # is moved into `hg bundle`.
624 622 if phases.hassecret(repo):
625 623 ui.warn(
626 624 _(
627 625 b'(warning: stream clone bundle will contain secret '
628 626 b'revisions)\n'
629 627 )
630 628 )
631 629
632 630 requirements, gen = streamclone.generatebundlev1(repo)
633 631 changegroup.writechunks(ui, gen, fname)
634 632
635 633 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
636 634
637 635
638 636 @command(
639 637 b'debugdag',
640 638 [
641 639 (b't', b'tags', None, _(b'use tags as labels')),
642 640 (b'b', b'branches', None, _(b'annotate with branch names')),
643 641 (b'', b'dots', None, _(b'use dots for runs')),
644 642 (b's', b'spaces', None, _(b'separate elements by spaces')),
645 643 ],
646 644 _(b'[OPTION]... [FILE [REV]...]'),
647 645 optionalrepo=True,
648 646 )
649 647 def debugdag(ui, repo, file_=None, *revs, **opts):
650 648 """format the changelog or an index DAG as a concise textual description
651 649
652 650 If you pass a revlog index, the revlog's DAG is emitted. If you list
653 651 revision numbers, they get labeled in the output as rN.
654 652
655 653 Otherwise, the changelog DAG of the current repo is emitted.
656 654 """
657 655 spaces = opts.get('spaces')
658 656 dots = opts.get('dots')
659 657 if file_:
660 658 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
661 659 revs = {int(r) for r in revs}
662 660
663 661 def events():
664 662 for r in rlog:
665 663 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
666 664 if r in revs:
667 665 yield b'l', (r, b"r%i" % r)
668 666
669 667 elif repo:
670 668 cl = repo.changelog
671 669 tags = opts.get('tags')
672 670 branches = opts.get('branches')
673 671 if tags:
674 672 labels = {}
675 673 for l, n in repo.tags().items():
676 674 labels.setdefault(cl.rev(n), []).append(l)
677 675
678 676 def events():
679 677 b = b"default"
680 678 for r in cl:
681 679 if branches:
682 680 newb = cl.read(cl.node(r))[5][b'branch']
683 681 if newb != b:
684 682 yield b'a', newb
685 683 b = newb
686 684 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
687 685 if tags:
688 686 ls = labels.get(r)
689 687 if ls:
690 688 for l in ls:
691 689 yield b'l', (r, l)
692 690
693 691 else:
694 692 raise error.Abort(_(b'need repo for changelog dag'))
695 693
696 694 for line in dagparser.dagtextlines(
697 695 events(),
698 696 addspaces=spaces,
699 697 wraplabels=True,
700 698 wrapannotations=True,
701 699 wrapnonlinear=dots,
702 700 usedots=dots,
703 701 maxlinewidth=70,
704 702 ):
705 703 ui.write(line)
706 704 ui.write(b"\n")
707 705
708 706
709 707 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
710 708 def debugdata(ui, repo, file_, rev=None, **opts):
711 709 """dump the contents of a data file revision"""
712 710 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
713 711 if rev is not None:
714 712 raise error.InputError(
715 713 _(b'cannot specify a revision with other arguments')
716 714 )
717 715 file_, rev = None, file_
718 716 elif rev is None:
719 717 raise error.InputError(_(b'please specify a revision'))
720 718 r = cmdutil.openstorage(
721 719 repo, b'debugdata', file_, pycompat.byteskwargs(opts)
722 720 )
723 721 try:
724 722 ui.write(r.rawdata(r.lookup(rev)))
725 723 except KeyError:
726 724 raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 725
728 726
729 727 @command(
730 728 b'debugdate',
731 729 [(b'e', b'extended', None, _(b'try extended date formats'))],
732 730 _(b'[-e] DATE [RANGE]'),
733 731 norepo=True,
734 732 optionalrepo=True,
735 733 )
736 734 def debugdate(ui, date, range=None, **opts):
737 735 """parse and display a date"""
738 736 if opts["extended"]:
739 737 d = dateutil.parsedate(date, dateutil.extendeddateformats)
740 738 else:
741 739 d = dateutil.parsedate(date)
742 740 ui.writenoi18n(b"internal: %d %d\n" % d)
743 741 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
744 742 if range:
745 743 m = dateutil.matchdate(range)
746 744 ui.writenoi18n(b"match: %s\n" % m(d[0]))
747 745
748 746
749 747 @command(
750 748 b'debugdeltachain',
751 749 cmdutil.debugrevlogopts + cmdutil.formatteropts,
752 750 _(b'-c|-m|FILE'),
753 751 optionalrepo=True,
754 752 )
755 753 def debugdeltachain(ui, repo, file_=None, **opts):
756 754 """dump information about delta chains in a revlog
757 755
758 756 Output can be templatized. Available template keywords are:
759 757
760 758 :``rev``: revision number
761 759 :``p1``: parent 1 revision number (for reference)
762 760 :``p2``: parent 2 revision number (for reference)
763 761 :``chainid``: delta chain identifier (numbered by unique base)
764 762 :``chainlen``: delta chain length to this revision
765 763 :``prevrev``: previous revision in delta chain
766 764 :``deltatype``: role of delta / how it was computed
767 765 - base: a full snapshot
768 766 - snap: an intermediate snapshot
769 767 - p1: a delta against the first parent
770 768 - p2: a delta against the second parent
771 769 - skip1: a delta against the same base as p1
772 770 (when p1 has empty delta)
773 771 - skip2: a delta against the same base as p2
774 772 (when p2 has empty delta)
775 773 - prev: a delta against the previous revision
776 774 - other: a delta against an arbitrary revision
777 775 :``compsize``: compressed size of revision
778 776 :``uncompsize``: uncompressed size of revision
779 777 :``chainsize``: total size of compressed revisions in chain
780 778 :``chainratio``: total chain size divided by uncompressed revision size
781 779 (new delta chains typically start at ratio 2.00)
782 780 :``lindist``: linear distance from base revision in delta chain to end
783 781 of this revision
784 782 :``extradist``: total size of revisions not part of this delta chain from
785 783 base of delta chain to end of this revision; a measurement
786 784 of how much extra data we need to read/seek across to read
787 785 the delta chain for this revision
788 786 :``extraratio``: extradist divided by chainsize; another representation of
789 787 how much unrelated data is needed to load this delta chain
790 788
791 789 If the repository is configured to use the sparse read, additional keywords
792 790 are available:
793 791
794 792 :``readsize``: total size of data read from the disk for a revision
795 793 (sum of the sizes of all the blocks)
796 794 :``largestblock``: size of the largest block of data read from the disk
797 795 :``readdensity``: density of useful bytes in the data read from the disk
798 796 :``srchunks``: in how many data hunks the whole revision would be read
799 797
800 798 The sparse read can be enabled with experimental.sparse-read = True
801 799 """
802 r = cmdutil.openrevlog(
800 revlog = cmdutil.openrevlog(
803 801 repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
804 802 )
805 index = r.index
806 start = r.start
807 length = r.length
808 generaldelta = r.delta_config.general_delta
809 withsparseread = r.data_config.with_sparse_read
810
811 # security to avoid crash on corrupted revlogs
812 total_revs = len(index)
813
814 chain_size_cache = {}
815
816 def revinfo(rev):
817 e = index[rev]
818 compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
819 uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
820
821 base = e[revlog_constants.ENTRY_DELTA_BASE]
822 p1 = e[revlog_constants.ENTRY_PARENT_1]
823 p2 = e[revlog_constants.ENTRY_PARENT_2]
824
825 # If the parents of a revision has an empty delta, we never try to delta
826 # against that parent, but directly against the delta base of that
827 # parent (recursively). It avoids adding a useless entry in the chain.
828 #
829 # However we need to detect that as a special case for delta-type, that
830 # is not simply "other".
831 p1_base = p1
832 if p1 != nullrev and p1 < total_revs:
833 e1 = index[p1]
834 while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
835 new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
836 if (
837 new_base == p1_base
838 or new_base == nullrev
839 or new_base >= total_revs
840 ):
841 break
842 p1_base = new_base
843 e1 = index[p1_base]
844 p2_base = p2
845 if p2 != nullrev and p2 < total_revs:
846 e2 = index[p2]
847 while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
848 new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
849 if (
850 new_base == p2_base
851 or new_base == nullrev
852 or new_base >= total_revs
853 ):
854 break
855 p2_base = new_base
856 e2 = index[p2_base]
857
858 if generaldelta:
859 if base == p1:
860 deltatype = b'p1'
861 elif base == p2:
862 deltatype = b'p2'
863 elif base == rev:
864 deltatype = b'base'
865 elif base == p1_base:
866 deltatype = b'skip1'
867 elif base == p2_base:
868 deltatype = b'skip2'
869 elif r.issnapshot(rev):
870 deltatype = b'snap'
871 elif base == rev - 1:
872 deltatype = b'prev'
873 else:
874 deltatype = b'other'
875 else:
876 if base == rev:
877 deltatype = b'base'
878 else:
879 deltatype = b'prev'
880
881 chain = r._deltachain(rev)[0]
882 chain_size = 0
883 for iter_rev in reversed(chain):
884 cached = chain_size_cache.get(iter_rev)
885 if cached is not None:
886 chain_size += cached
887 break
888 e = index[iter_rev]
889 chain_size += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
890 chain_size_cache[rev] = chain_size
891
892 return p1, p2, compsize, uncompsize, deltatype, chain, chain_size
893
894 803 fm = ui.formatter(b'debugdeltachain', pycompat.byteskwargs(opts))
895 804
896 fm.plain(
897 b' rev p1 p2 chain# chainlen prev delta '
898 b'size rawsize chainsize ratio lindist extradist '
899 b'extraratio'
900 )
901 if withsparseread:
902 fm.plain(b' readsize largestblk rddensity srchunks')
903 fm.plain(b'\n')
904
905 chainbases = {}
906 for rev in r:
907 p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
908 chainbase = chain[0]
909 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
910 basestart = start(chainbase)
911 revstart = start(rev)
912 lineardist = revstart + comp - basestart
913 extradist = lineardist - chainsize
914 try:
915 prevrev = chain[-2]
916 except IndexError:
917 prevrev = -1
918
919 if uncomp != 0:
920 chainratio = float(chainsize) / float(uncomp)
921 else:
922 chainratio = chainsize
923
924 if chainsize != 0:
925 extraratio = float(extradist) / float(chainsize)
926 else:
927 extraratio = extradist
928
805 lines = revlog_debug.debug_delta_chain(revlog)
806 # first entry is the header
807 header = next(lines)
808 fm.plain(header)
809 for entry in lines:
810 label = b' '.join(e[0] for e in entry)
811 format = b' '.join(e[1] for e in entry)
812 values = [e[3] for e in entry]
813 data = dict((e[2], e[3]) for e in entry)
929 814 fm.startitem()
930 fm.write(
931 b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
932 b'uncompsize chainsize chainratio lindist extradist '
933 b'extraratio',
934 b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
935 rev,
936 p1,
937 p2,
938 chainid,
939 len(chain),
940 prevrev,
941 deltatype,
942 comp,
943 uncomp,
944 chainsize,
945 chainratio,
946 lineardist,
947 extradist,
948 extraratio,
949 rev=rev,
950 chainid=chainid,
951 chainlen=len(chain),
952 prevrev=prevrev,
953 deltatype=deltatype,
954 compsize=comp,
955 uncompsize=uncomp,
956 chainsize=chainsize,
957 chainratio=chainratio,
958 lindist=lineardist,
959 extradist=extradist,
960 extraratio=extraratio,
961 )
962 if withsparseread:
963 readsize = 0
964 largestblock = 0
965 srchunks = 0
966
967 for revschunk in deltautil.slicechunk(r, chain):
968 srchunks += 1
969 blkend = start(revschunk[-1]) + length(revschunk[-1])
970 blksize = blkend - start(revschunk[0])
971
972 readsize += blksize
973 if largestblock < blksize:
974 largestblock = blksize
975
976 if readsize:
977 readdensity = float(chainsize) / float(readsize)
978 else:
979 readdensity = 1
980
981 fm.write(
982 b'readsize largestblock readdensity srchunks',
983 b' %10d %10d %9.5f %8d',
984 readsize,
985 largestblock,
986 readdensity,
987 srchunks,
988 readsize=readsize,
989 largestblock=largestblock,
990 readdensity=readdensity,
991 srchunks=srchunks,
992 )
993
815 fm.write(label, format, *values, **data)
994 816 fm.plain(b'\n')
995
996 817 fm.end()
997 818
998 819
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    Replay the process used to find the "best" delta to store a revision
    and display information about every step taken to reach that result.

    By default the process is fed the full-text for the revision; this can
    be controlled with the --source flag.

    REV is the revision number of the target storage (not a changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    # one positional argument: REV only (target chosen via -c/-m flags);
    # two positional arguments: explicit FILE plus REV
    if arg_2 is None:
        file_, rev = None, arg_1
    else:
        file_, rev = arg_1, arg_2

    rev = int(rev)

    revlog = cmdutil.openrevlog(
        repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
    )
    p1r, p2r = revlog.parentrevs(rev)

    # map each --source value to a lazy computation of the delta base it
    # implies; laziness keeps deltaparent() from running for other sources
    base_rev_for_source = {
        b'full': lambda: nullrev,
        b'storage': lambda: revlog.deltaparent(rev),
        b'p1': lambda: p1r,
        b'p2': lambda: p2r,
        b'prev': lambda: rev - 1,
    }
    compute_base = base_rev_for_source.get(source)
    if compute_base is None:
        raise error.InputError(b"invalid --source value: %s" % source)
    base_rev = compute_base()

    revlog_debug.debug_delta_find(ui, revlog, rev, base_rev=base_rev)
1057 878
1058 879
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 docket (metadata file) instead of
        # the entries; only dirstate-v2 has a docket.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) forces dates off, overriding --dates
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (saved mtime, filename)
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # padded to the width of the strftime output below
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symbolic-link bit set in the stored mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1146 967
1147 968
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 keeps a hash of the ignore patterns in its docket's
    # tree metadata; for v1 the command intentionally prints nothing.
    # (docstring fixed: it used to say "nothing for dirstate-v2", which
    # contradicted the code below)
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1162 983
1163 984
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        # help-string typo fixed: "seed use for" -> "seed used for"
        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            # help-string typo fixed: "local has having" -> "local as having"
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts["local_as_revs"]
    remote_revs = opts["remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts['seed']))

    if not remote_revs:
        # a real (or default) remote peer
        path = urlutil.get_unique_pull_path_obj(
            b'debugdiscovery', ui, remoteurl
        )
        branches = (path.branch, [])
        remote = hg.peer(repo, pycompat.byteskwargs(opts), path)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
    else:
        # --remote-as-revs: use the local repo, filtered down to the
        # requested subset, as the "remote" side
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: likewise restrict the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get('old'):
        # old-style (tree) discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not hasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get('nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']

    fm = ui.formatter(b'debugdiscovery', pycompat.byteskwargs(opts))
    if fm.strict_format:
        # machine-readable output: capture the chatter so it can be emitted
        # as a proper 'output' field instead of polluting the format

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b"  round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b"  round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b"  round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b"  queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b"  queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b"    common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b"  common: %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1444 1265
1445 1266
# read/write chunk size (4 KiB) used by debugdownload below
_chunksize = 4 << 10
1447 1268
1448 1269
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is copied to `--output` when given, otherwise streamed to
    the ui.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        try:
            # fix: the source handle was previously leaked (never closed)
            fh.close()
        finally:
            if output:
                dest.close()
1471 1292
1472 1293
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', pycompat.byteskwargs(opts))
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # best-effort source location: a real module file, or the frozen
        # executable when running an oxidized build
        if hasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # default mode: annotate the name with compatibility info
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # tested, but not with this version: show the latest tested
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1533 1354
1534 1355
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize

    ctx = logcmdutil.revsingle(repo, opts.get('rev'), None)

    # parsing pipeline; each stage name may be requested via --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts['show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # run the pipeline, dumping each requested intermediate tree
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != b'parsed':
                # label the dump only when stages were explicitly requested
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather candidate file names the matcher will be tested against
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1630 1451
1631 1452
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # building a report and consuming/applying one are mutually exclusive
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the actual detection/repair logic lives in revlogutils.rewrite
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1704 1525
1705 1526
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    # first column width: longest variant name, at least the header width
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', pycompat.byteskwargs(opts))
    if fm.isplain():

        def formatvalue(value):
            # bytes values pass through; everything else renders as yes/no
            if hasattr(value, 'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # label mismatches between repo value, config value and default so
        # they can be highlighted by the color extension
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1775 1596
1776 1597
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # same rendering as the historical `flag and b'yes' or b'no'` idiom
        return b'yes' if flag else b'no'

    writeln = ui.writenoi18n
    writeln(b'path: %s\n' % path)
    writeln(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
    writeln(b'exec: %s\n' % yesno(util.checkexec(path)))
    writeln(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    writeln(b'symlink: %s\n' % yesno(util.checklink(path)))
    writeln(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probe with a real temp file; failure (e.g. unwritable path) simply
        # leaves the answer as "(unknown)"
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    writeln(b'case-sensitive: %s\n' % casesensitive)
1799 1620
1800 1621
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    peer = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # assemble the keyword arguments of the wire-protocol getbundle() call
    # TODO: get desired bundlecaps from command line.
    kwargs = {'bundlecaps': None}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    bundle = peer.getbundle(b'debug', **kwargs)

    # translate the user-facing --type name into an on-disk bundle type;
    # an unknown name maps to None, which fails the membership test below
    requested = opts.get('type', b'bzip2').lower()
    name_to_bundletype = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = name_to_bundletype.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1846 1667
1847 1668
@command(b'debugignore', [], b'[FILE]...')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # the file itself is not ignored; check whether one of
                    # its parent directories is
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1896 1717
1897 1718
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    byteopts = pycompat.byteskwargs(opts)
    storage = cmdutil.openstorage(repo, b'debugindex', file_, byteopts)
    formatter = ui.formatter(b'debugindex', byteopts)
    # filelog/manifest objects wrap the actual revlog; unwrap when present
    target = getattr(storage, '_revlog', storage)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=formatter,
        revlog=target,
        full_node=ui.debugflag,
    )
1919 1740
1920 1741
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    store = cmdutil.openstorage(
        repo, b'debugindexdot', file_, pycompat.byteskwargs(opts)
    )
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        # only merge revisions get a second parent edge
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1940 1761
1941 1762
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise the index once so the native code has stats to report
    repo.changelog.shortest(repo.nullid, 1)
    idx = repo.changelog.index
    if not hasattr(idx, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = idx.stats()
    for statname in sorted(stats):
        ui.write(b'%s: %d\n' % (statname, stats[statname]))
1951 1772
1952 1773
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    # count of detected problems; returned to the caller at the end
    problems = 0

    fm = ui.formatter(b'debuginstall', pycompat.byteskwargs(opts))
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if hasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer) builds have no os.__file__; report the binary
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if hasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # import the compiled extension modules to make sure they load
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util.has_re2():
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # the editor setting may be a full command line; check its executable
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        # missing default 'vi' is only a warning; a missing configured
        # editor is a real problem
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let loaded extensions contribute their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2249 2070
2250 2071
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    nodes = [bin(nodeid) for nodeid in ids]
    # one output digit per queried node, in input order
    bits = [b"1" if known else b"0" for known in peer.known(nodes)]
    ui.write(b"%s\n" % b"".join(bits))
2263 2084
2264 2085
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # delegate to the modern name-completion command
    debugnamecomplete(ui, repo, *args)
2269 2090
2270 2091
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # force-free: unconditionally delete the lock files (DANGEROUS); no
    # check is made for a live holder
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # set-lock/set-wlock: acquire and hold until interrupted
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # always release whatever we managed to acquire
        release(*locks)

    # default mode: report current lock state
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody else held it; release immediately
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # lock content is "host:pid"
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # lock vanished between the failed acquire and the stat
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2393 2214
2394 2215
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # the fulltext cache is an implementation detail of the revlog
        # manifest storage; abort if the backend does not expose one
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # no option given: dump the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2468 2289
2469 2290
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    # default template used when the user supplied none
    if not opts['template']:
        opts['template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', pycompat.byteskwargs(opts))
    fm.startitem()

    # the two commits being merged (local/other), with optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras for files not tracked in the merge state itself
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2576 2397
2577 2398
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''
    candidates = set()
    # branches get special treatment: historically only *open* branches
    # were listed, so skip the generic namespace and add them separately
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(c for c in candidates if c.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2600 2421
2601 2422
@command(
    b'debugnodemap',
    (
        cmdutil.debugrevlogopts
        + [
            (
                b'',
                b'dump-new',
                False,
                _(b'write a (new) persistent binary nodemap on stdout'),
            ),
            (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
            (
                b'',
                b'check',
                False,
                _(b'check that the data on disk data are correct.'),
            ),
            (
                b'',
                b'metadata',
                False,
                _(b'display the on disk meta data for the nodemap'),
            ),
        ]
    ),
    _(b'-c|-m|FILE'),
)
def debugnodemap(ui, repo, file_=None, **opts):
    """write and inspect on disk nodemap"""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if file_ is not None:
            raise error.InputError(
                _(b'cannot specify a file with other arguments')
            )
    elif file_ is None:
        # no revlog selected at all: default to the changelog
        opts['changelog'] = True
    r = cmdutil.openstorage(
        repo.unfiltered(), b'debugnodemap', file_, pycompat.byteskwargs(opts)
    )
    if isinstance(r, (manifest.manifestrevlog, filelog.filelog)):
        # unwrap to the underlying revlog, whose index holds the nodemap
        r = r._revlog
    if opts['dump_new']:
        if hasattr(r.index, "nodemap_data_all"):
            data = r.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(r.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, r.index, data)
    elif opts['metadata']:
        # print the docket fields describing the persisted nodemap file
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2671 2492
2672 2493
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by index
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a new marker precursor -> successors
        if opts['rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        # NOTE(review): message reads "cannot used"; typo
                        # kept for output compatibility
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts['flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts['rev']:
            revs = logcmdutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts['exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # indices are global, so iterate everything but filter display
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2820 2641
2821 2642
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    # mapping is destination -> source; print as "source -> destination"
    for dest, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2833 2654
2834 2655
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    # mapping is destination -> source; print as "source -> destination"
    for dest, source in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2846 2667
2847 2668
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completion candidates for *path*, limited to
        # dirstate entries whose state letter is in *acceptable*.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Spec points outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec relative to the repository root.
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/'; translate when the OS separator
        # differs (i.e. on Windows).
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # Bind the add methods once; this loop visits every dirstate entry.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, truncate at the next path separator so only
                # the next path segment is offered.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate state letters from the options.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # No filter option given: accept every state ('nmar').
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2916 2737
2917 2738
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints and restrict to the matched files.
    src_ctx = scmutil.revsingle(repo, rev1)
    dst_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(src_ctx, pats, opts)
    copy_map = copies.pathcopies(src_ctx, dst_ctx, matcher)
    # Output is sorted by destination path for stable results.
    for dest, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2931 2752
2932 2753
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        # Report whether the peer is local and whether it accepts pushes.
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
        )
    finally:
        # Always release the connection, even if a query above failed.
        peer.close()
2956 2777
2957 2778
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    # --tool takes priority over everything else; mimic that by forcing
    # ui.forcemerge for the duration of the examination.
    overrides = {}
    if opts['tool']:
        overrides[(b'ui', b'forcemerge')] = opts['tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # With -v, echo the configuration sources that influence selection.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Silence tool-selection chatter unless --debug was given.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3041 2862
3042 2863
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Five-argument form: attempt the compare-and-set push.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # Exit status 0 on success (truthy r), 1 on failure.
            return not r
        else:
            # Two-argument form: dump every key/value in the namespace.
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
3078 2899
3079 2900
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors (pvecs) of two revisions

    Prints both vectors, their depths, the delta/hamming distance
    between them, and the relation character (=, >, <, or |).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive fallback: previously `rel` was left unbound when none of
        # the comparisons matched, which would raise UnboundLocalError below.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3106 2927
3107 2928
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        # Rebuilding inside an open transaction would interleave dirstate
        # writes with other pending changes; refuse outright.
        if repo.currenttransaction() is not None:
            msg = b'rebuild the dirstate outside of a transaction'
            raise error.ProgrammingError(msg)
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files in the manifest but missing from the dirstate...
            manifestonly = manifestfiles - dirstatefiles
            # ...plus dirstate-only files that are not pending adds.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        # changedfiles=None means "rebuild everything".
        with dirstate.changing_parents(repo):
            dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3159 2980
3160 2981
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Delegate the actual rebuild; --only-data restricts the scan to .d files.
    only_data = opts.get("only_data")
    repair.rebuildfncache(ui, repo, only_data)
3176 2997
3177 2998
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    # Walk every matched file in the requested revision and report the
    # rename recorded in its filelog, if any.
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(path)
        if renamed:
            src_path, src_node = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n") % (rel, src_path, hex(src_node))
            )
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3196 3017
3197 3018
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3203 3024
3204 3025
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    # Open the changelog, manifest, or a file revlog depending on the flags.
    rl = cmdutil.openrevlog(
        repo, b'debugrevlog', file_, pycompat.byteskwargs(opts)
    )
    # --dump prints the raw index; otherwise emit summary statistics.
    if opts.get("dump"):
        revlog_debug.dump(ui, rl)
    else:
        revlog_debug.debug_revlog(ui, rl)
    return 0
3222 3043
3223 3044
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    r = cmdutil.openrevlog(
        repo, b'debugrevlogindex', file_, pycompat.byteskwargs(opts)
    )
    # Only the two historical index layouts are supported.
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full 40-char hashes; otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Probe the first revision to size the nodeid columns.
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the header row matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One output line per revision, mirroring the header layout above.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Damaged index entries still get a line, with null parents.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3338 3159
3339 3160
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    aliases = ui.configitems(b'revsetalias')
    # The revset compilation pipeline, in order; each stage transforms the
    # tree produced by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Decide which stage trees to print: always, or only when changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts['optimize']:
            showalways.add(b'optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts['show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the pipeline, keeping each intermediate tree for later comparison.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff the
        # resulting revision sequences; exit 1 when they disagree.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3470 3291
3471 3292
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    if not opts['sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # The two logging destinations are mutually exclusive.
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts['logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), 'wb', 0)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], b'ab', 0)

    # Serve forever over the process's stdio handles.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3518 3339
3519 3340
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only used if you are one of the few people that
    deeply understand both conversion tools and file level histories. If you
    are reading this help, you are not one of these people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """
    # Resolve both revisions up front; rev2 defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parents are rewritten; no files are touched.
    with repo.wlock():
        repo.setparents(node1, node2)
3547 3368
3548 3369
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        # With -c/-m/--dir the positional FILE argument is really the REV.
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    r = cmdutil.openstorage(
        repo, b'debugdata', file_, pycompat.byteskwargs(opts)
    )
    # Unwrap to the underlying revlog when the storage object wraps one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Print entries sorted by key; --verbose also dumps the values.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3578 3399
3579 3400
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # use an SSLContext configured the same way (no verification, no
    # hostname check) since we only want the peer's certificate here.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # DER-encoded peer certificate for the chain check.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call with build=True asks Windows Update to fetch the
            # missing intermediates/root.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3649 3470
3650 3471
@command(
    b'debug::stable-tail-sort',
    [
        (
            b'T',
            b'template',
            b'{rev}\n',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
    ],
    b'REV',
)
def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
    """display the stable-tail sort of the ancestors of a given node"""
    # Resolve the anchor revision and compute the sort over the changelog.
    anchor_rev = logcmdutil.revsingle(repo, revspec).rev()
    changelog = repo.changelog
    displayer = logcmdutil.maketemplater(ui, repo, template)
    # Render each ancestor in stable-tail order through the template.
    for ancestor_rev in stabletailsort._stable_tail_sort_naive(
        changelog, anchor_rev
    ):
        displayer.show(repo[ancestor_rev])
3673 3494
3674 3495
@command(
    b'debug::stable-tail-sort-leaps',
    [
        (
            b'T',
            b'template',
            b'{rev}',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
        (b's', b'specific', False, _(b'restrict to specific leaps')),
    ],
    b'REV',
)
def debug_stable_tail_sort_leaps(ui, repo, rspec, template, specific, **opts):
    """display the leaps in the stable-tail sort of a node, one per line"""
    anchor_rev = logcmdutil.revsingle(repo, rspec).rev()

    # --specific narrows the enumeration to "specific" leaps only.
    leap_finder = (
        stabletailsort._find_specific_leaps_naive
        if specific
        else stabletailsort._find_all_leaps_naive
    )

    displayer = logcmdutil.maketemplater(ui, repo, template)
    # Each leap is printed as its source and target, then a blank line.
    for leap_source, leap_target in leap_finder(repo.changelog, anchor_rev):
        displayer.show(repo[leap_source])
        displayer.show(repo[leap_target])
        ui.write(b'\n')
3703 3524
3704 3525
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip-backup bundle file, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # Neutralize incoming-style options before handing opts to the
    # remote-changes machinery below.
    opts["bundle"] = b""
    opts["force"] = None
    limit = logcmdutil.getlimit(pycompat.byteskwargs(opts))

    def display(other, chlist, displayer):
        # Show up to ``limit`` changesets from one bundle, honoring the
        # usual log options (--newest-first, --no-merges).
        if opts.get("newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            # Two non-null parents means a merge changeset.
            if opts.get("no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get("recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        path = urlutil.get_unique_pull_path_obj(
            b'debugbackupbundle',
            ui,
            source,
        )
        try:
            # Open the bundle file as a peer so it can be inspected like a
            # remote repository.
            other = hg.peer(repo, pycompat.byteskwargs(opts), path)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % path.loc
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        branches = (path.branch, opts.get('branch', []))
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get("rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Quietly compute which changesets this bundle would add.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts["bundle"], opts["force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, path.loc)
                        gen = exchange.readbundle(ui, f, path.loc)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + path.loc,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
                        # Stop after the first bundle containing the node.
                        break
            else:
                # Listing mode: print the bundle's mtime as a header, then
                # either the bundle path (--verbose) or a one-line-per-cset
                # template.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(path.loc)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
                else:
                    opts[
                        "template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, pycompat.byteskwargs(opts), False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # getremotechanges may have created temporary state; always
            # clean it up, even after the early ``break`` above.
            cleanupfn()
3843 3664
3844 3665
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source, pinned revision) of the
    # selected changeset, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for subpath, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % subpath)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3856 3677
3857 3678
@command(
    b'debugshell',
    [
        (
            b'c',
            b'command',
            b'',
            _(b'program passed in as a string'),
            _(b'COMMAND'),
        )
    ],
    _(b'[-c COMMAND]'),
    optionalrepo=True,
)
def debugshell(ui, repo, **opts):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Names pre-bound in the interpreter's namespace.
    imported_objects = {
        'ui': ui,
        'repo': repo,
    }

    # py2exe disables initialization of the site module, which is responsible
    # for arranging for ``quit()`` to exit the interpreter. Manually initialize
    # the stuff that site normally does here, so that the interpreter can be
    # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
    # py.exe, or py2exe.
    if getattr(sys, "frozen", None) == 'console_exe':
        try:
            import site

            site.setcopyright()
            site.sethelper()
            site.setquit()
        except ImportError:
            site = None  # Keep PyCharm happy

    command = opts.get('command')
    if command:
        # -c: compile and execute the given program instead of going
        # interactive.
        compiled = code.compile_command(encoding.strfromlocal(command))
        code.InteractiveInterpreter(locals=imported_objects).runcode(compiled)
        return

    code.interact(local=imported_objects)
3907 3728
3908 3729
@command(
    b'debug-revlog-stats',
    [
        (b'c', b'changelog', None, _(b'Display changelog statistics')),
        (b'm', b'manifest', None, _(b'Display manifest statistics')),
        (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
    ]
    + cmdutil.formatteropts,
)
def debug_revlog_stats(ui, repo, **opts):
    """display statistics about revlogs in the store"""
    # Which revlog categories were requested on the command line.
    want_changelog = opts["changelog"]
    want_manifest = opts["manifest"]
    want_filelogs = opts["filelogs"]

    # With no explicit selection, report on everything.
    if (
        want_changelog is None
        and want_manifest is None
        and want_filelogs is None
    ):
        want_changelog = want_manifest = want_filelogs = True

    repo = repo.unfiltered()
    fm = ui.formatter(b'debug-revlog-stats', pycompat.byteskwargs(opts))
    revlog_debug.debug_revlog_stats(
        repo, fm, want_changelog, want_manifest, want_filelogs
    )
    fm.end()
3933 3754
3934 3755
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so computed work can be reused.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b' ' + b' '.join(short(n) for n in succsset))
            ui.write(b'\n')
3989 3810
3990 3811
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        fnode = cache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
            if not flog.hasnode(fnode):
                display = display + b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            # Falsy but not None: the cache holds an invalid entry.
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), display))
4009 3830
4010 3831
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # -r requires a repository (optionalrepo=True allows repo=None).
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    # 'ui' is rejected because it would shadow the reserved resource name.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Dump the parsed template tree, plus the alias-expanded tree when
        # expansion actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render exactly once with the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4074 3895
4075 3896
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    if response is None:
        # getpass() may yield None; echo a placeholder so the output is
        # still a printable bytestring.
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4090 3911
4091 3912
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4104 3925
4105 3926
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy lock and the store lock before rewriting
    # cache files.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4111 3932
4112 3933
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # upgraderepo expects the optimizations as a set; -o may repeat.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4162 3983
4163 3984
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    files = list(repo[None].walk(matcher))
    if not files:
        return
    # Honor ui.slash by normalizing path separators on Windows-like setups.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        fixpath = util.normpath
    else:
        fixpath = lambda fn: fn
    # Column widths sized to the longest repo-relative and cwd-relative names.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in files),
        max(len(repo.pathto(fname)) for fname in files),
    )
    for fname in files:
        flag = b'exact' if matcher.exact(fname) else b''
        line = fmt % (fname, fixpath(repo.pathto(fname)), flag)
        ui.write(b"%s\n" % line.rstrip())
4189 4010
4190 4011
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # List each divergent node with its phase, trailing-space
            # separated so it splices cleanly into the output line.
            parts = [
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            ]
            dnodes = b' '.join(parts) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4208 4029
4209 4030
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    peer = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    try:
        # Strip the shared remote options; whatever remains (and is truthy)
        # becomes a wire argument.
        for remoteopt in cmdutil.remoteopts:
            del opts[pycompat.sysstr(remoteopt[1])]
        args = {k: v for k, v in opts.items() if v}

        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4239 4060
4240 4061
4241 4062 def _parsewirelangblocks(fh):
4242 4063 activeaction = None
4243 4064 blocklines = []
4244 4065 lastindent = 0
4245 4066
4246 4067 for line in fh:
4247 4068 line = line.rstrip()
4248 4069 if not line:
4249 4070 continue
4250 4071
4251 4072 if line.startswith(b'#'):
4252 4073 continue
4253 4074
4254 4075 if not line.startswith(b' '):
4255 4076 # New block. Flush previous one.
4256 4077 if activeaction:
4257 4078 yield activeaction, blocklines
4258 4079
4259 4080 activeaction = line
4260 4081 blocklines = []
4261 4082 lastindent = 0
4262 4083 continue
4263 4084
4264 4085 # Else we start with an indent.
4265 4086
4266 4087 if not activeaction:
4267 4088 raise error.Abort(_(b'indented line outside of block'))
4268 4089
4269 4090 indent = len(line) - len(line.lstrip())
4270 4091
4271 4092 # If this line is indented more than the last line, concatenate it.
4272 4093 if indent > lastindent and blocklines:
4273 4094 blocklines[-1] += line.lstrip()
4274 4095 else:
4275 4096 blocklines.append(line)
4276 4097 lastindent = indent
4277 4098
4278 4099 # Flush last block.
4279 4100 if activeaction:
4280 4101 yield activeaction, blocklines
4281 4102
4282 4103
4283 4104 @command(
4284 4105 b'debugwireproto',
4285 4106 [
4286 4107 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4287 4108 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4288 4109 (
4289 4110 b'',
4290 4111 b'noreadstderr',
4291 4112 False,
4292 4113 _(b'do not read from stderr of the remote'),
4293 4114 ),
4294 4115 (
4295 4116 b'',
4296 4117 b'nologhandshake',
4297 4118 False,
4298 4119 _(b'do not log I/O related to the peer handshake'),
4299 4120 ),
4300 4121 ]
4301 4122 + cmdutil.remoteopts,
4302 4123 _(b'[PATH]'),
4303 4124 optionalrepo=True,
4304 4125 )
4305 4126 def debugwireproto(ui, repo, path=None, **opts):
4306 4127 """send wire protocol commands to a server
4307 4128
4308 4129 This command can be used to issue wire protocol commands to remote
4309 4130 peers and to debug the raw data being exchanged.
4310 4131
4311 4132 ``--localssh`` will start an SSH server against the current repository
4312 4133 and connect to that. By default, the connection will perform a handshake
4313 4134 and establish an appropriate peer instance.
4314 4135
4315 4136 ``--peer`` can be used to bypass the handshake protocol and construct a
4316 4137 peer instance using the specified class type. Valid values are ``raw``,
4317 4138 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4318 4139 don't support higher-level command actions.
4319 4140
4320 4141 ``--noreadstderr`` can be used to disable automatic reading from stderr
4321 4142 of the peer (for SSH connections only). Disabling automatic reading of
4322 4143 stderr is useful for making output more deterministic.
4323 4144
4324 4145 Commands are issued via a mini language which is specified via stdin.
4325 4146 The language consists of individual actions to perform. An action is
4326 4147 defined by a block. A block is defined as a line with no leading
4327 4148 space followed by 0 or more lines with leading space. Blocks are
4328 4149 effectively a high-level command with additional metadata.
4329 4150
4330 4151 Lines beginning with ``#`` are ignored.
4331 4152
4332 4153 The following sections denote available actions.
4333 4154
4334 4155 raw
4335 4156 ---
4336 4157
4337 4158 Send raw data to the server.
4338 4159
4339 4160 The block payload contains the raw data to send as one atomic send
4340 4161 operation. The data may not actually be delivered in a single system
4341 4162 call: it depends on the abilities of the transport being used.
4342 4163
4343 4164 Each line in the block is de-indented and concatenated. Then, that
4344 4165 value is evaluated as a Python b'' literal. This allows the use of
4345 4166 backslash escaping, etc.
4346 4167
4347 4168 raw+
4348 4169 ----
4349 4170
4350 4171 Behaves like ``raw`` except flushes output afterwards.
4351 4172
4352 4173 command <X>
4353 4174 -----------
4354 4175
4355 4176 Send a request to run a named command, whose name follows the ``command``
4356 4177 string.
4357 4178
4358 4179 Arguments to the command are defined as lines in this block. The format of
4359 4180 each line is ``<key> <value>``. e.g.::
4360 4181
4361 4182 command listkeys
4362 4183 namespace bookmarks
4363 4184
4364 4185 If the value begins with ``eval:``, it will be interpreted as a Python
4365 4186 literal expression. Otherwise values are interpreted as Python b'' literals.
4366 4187 This allows sending complex types and encoding special byte sequences via
4367 4188 backslash escaping.
4368 4189
4369 4190 The following arguments have special meaning:
4370 4191
4371 4192 ``PUSHFILE``
4372 4193 When defined, the *push* mechanism of the peer will be used instead
4373 4194 of the static request-response mechanism and the content of the
4374 4195 file specified in the value of this argument will be sent as the
4375 4196 command payload.
4376 4197
4377 4198 This can be used to submit a local bundle file to the remote.
4378 4199
4379 4200 batchbegin
4380 4201 ----------
4381 4202
4382 4203 Instruct the peer to begin a batched send.
4383 4204
4384 4205 All ``command`` blocks are queued for execution until the next
4385 4206 ``batchsubmit`` block.
4386 4207
4387 4208 batchsubmit
4388 4209 -----------
4389 4210
4390 4211 Submit previously queued ``command`` blocks as a batch request.
4391 4212
4392 4213 This action MUST be paired with a ``batchbegin`` action.
4393 4214
4394 4215 httprequest <method> <path>
4395 4216 ---------------------------
4396 4217
4397 4218 (HTTP peer only)
4398 4219
4399 4220 Send an HTTP request to the peer.
4400 4221
4401 4222 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4402 4223
4403 4224 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4404 4225 headers to add to the request. e.g. ``Accept: foo``.
4405 4226
4406 4227 The following arguments are special:
4407 4228
4408 4229 ``BODYFILE``
4409 4230 The content of the file defined as the value to this argument will be
4410 4231 transferred verbatim as the HTTP request body.
4411 4232
4412 4233 ``frame <type> <flags> <payload>``
4413 4234 Send a unified protocol frame as part of the request body.
4414 4235
4415 4236 All frames will be collected and sent as the body to the HTTP
4416 4237 request.
4417 4238
4418 4239 close
4419 4240 -----
4420 4241
4421 4242 Close the connection to the server.
4422 4243
4423 4244 flush
4424 4245 -----
4425 4246
4426 4247 Flush data written to the server.
4427 4248
4428 4249 readavailable
4429 4250 -------------
4430 4251
4431 4252 Close the write end of the connection and read all available data from
4432 4253 the server.
4433 4254
4434 4255 If the connection to the server encompasses multiple pipes, we poll both
4435 4256 pipes and read available data.
4436 4257
4437 4258 readline
4438 4259 --------
4439 4260
4440 4261 Read a line of output from the server. If there are multiple output
4441 4262 pipes, reads only the main pipe.
4442 4263
4443 4264 ereadline
4444 4265 ---------
4445 4266
4446 4267 Like ``readline``, but read from the stderr pipe, if available.
4447 4268
4448 4269 read <X>
4449 4270 --------
4450 4271
4451 4272 ``read()`` N bytes from the server's main output pipe.
4452 4273
4453 4274 eread <X>
4454 4275 ---------
4455 4276
4456 4277 ``read()`` N bytes from the server's stderr pipe, if available.
4457 4278
4458 4279 Specifying Unified Frame-Based Protocol Frames
4459 4280 ----------------------------------------------
4460 4281
4461 4282 It is possible to emit a *Unified Frame-Based Protocol* by using special
4462 4283 syntax.
4463 4284
4464 4285 A frame is composed as a type, flags, and payload. These can be parsed
4465 4286 from a string of the form:
4466 4287
4467 4288 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4468 4289
4469 4290 ``request-id`` and ``stream-id`` are integers defining the request and
4470 4291 stream identifiers.
4471 4292
4472 4293 ``type`` can be an integer value for the frame type or the string name
4473 4294 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4474 4295 ``command-name``.
4475 4296
4476 4297 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4477 4298 components. Each component (and there can be just one) can be an integer
4478 4299 or a flag name for stream flags or frame flags, respectively. Values are
4479 4300 resolved to integers and then bitwise OR'd together.
4480 4301
4481 4302 ``payload`` represents the raw frame payload. If it begins with
4482 4303 ``cbor:``, the following string is evaluated as Python code and the
4483 4304 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4484 4305 as a Python byte string literal.
4485 4306 """
4486 4307 if opts['localssh'] and not repo:
4487 4308 raise error.Abort(_(b'--localssh requires a repository'))
4488 4309
4489 4310 if opts['peer'] and opts['peer'] not in (
4490 4311 b'raw',
4491 4312 b'ssh1',
4492 4313 ):
4493 4314 raise error.Abort(
4494 4315 _(b'invalid value for --peer'),
4495 4316 hint=_(b'valid values are "raw" and "ssh1"'),
4496 4317 )
4497 4318
4498 4319 if path and opts['localssh']:
4499 4320 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4500 4321
4501 4322 if ui.interactive():
4502 4323 ui.write(_(b'(waiting for commands on stdin)\n'))
4503 4324
4504 4325 blocks = list(_parsewirelangblocks(ui.fin))
4505 4326
4506 4327 proc = None
4507 4328 stdin = None
4508 4329 stdout = None
4509 4330 stderr = None
4510 4331 opener = None
4511 4332
4512 4333 if opts['localssh']:
4513 4334 # We start the SSH server in its own process so there is process
4514 4335 # separation. This prevents a whole class of potential bugs around
4515 4336 # shared state from interfering with server operation.
4516 4337 args = procutil.hgcmd() + [
4517 4338 b'-R',
4518 4339 repo.root,
4519 4340 b'debugserve',
4520 4341 b'--sshstdio',
4521 4342 ]
4522 4343 proc = subprocess.Popen(
4523 4344 pycompat.rapply(procutil.tonativestr, args),
4524 4345 stdin=subprocess.PIPE,
4525 4346 stdout=subprocess.PIPE,
4526 4347 stderr=subprocess.PIPE,
4527 4348 bufsize=0,
4528 4349 )
4529 4350
4530 4351 stdin = proc.stdin
4531 4352 stdout = proc.stdout
4532 4353 stderr = proc.stderr
4533 4354
4534 4355 # We turn the pipes into observers so we can log I/O.
4535 4356 if ui.verbose or opts['peer'] == b'raw':
4536 4357 stdin = util.makeloggingfileobject(
4537 4358 ui, proc.stdin, b'i', logdata=True
4538 4359 )
4539 4360 stdout = util.makeloggingfileobject(
4540 4361 ui, proc.stdout, b'o', logdata=True
4541 4362 )
4542 4363 stderr = util.makeloggingfileobject(
4543 4364 ui, proc.stderr, b'e', logdata=True
4544 4365 )
4545 4366
4546 4367 # --localssh also implies the peer connection settings.
4547 4368
4548 4369 url = b'ssh://localserver'
4549 4370 autoreadstderr = not opts['noreadstderr']
4550 4371
4551 4372 if opts['peer'] == b'ssh1':
4552 4373 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4553 4374 peer = sshpeer.sshv1peer(
4554 4375 ui,
4555 4376 url,
4556 4377 proc,
4557 4378 stdin,
4558 4379 stdout,
4559 4380 stderr,
4560 4381 None,
4561 4382 autoreadstderr=autoreadstderr,
4562 4383 )
4563 4384 elif opts['peer'] == b'raw':
4564 4385 ui.write(_(b'using raw connection to peer\n'))
4565 4386 peer = None
4566 4387 else:
4567 4388 ui.write(_(b'creating ssh peer from handshake results\n'))
4568 4389 peer = sshpeer._make_peer(
4569 4390 ui,
4570 4391 url,
4571 4392 proc,
4572 4393 stdin,
4573 4394 stdout,
4574 4395 stderr,
4575 4396 autoreadstderr=autoreadstderr,
4576 4397 )
4577 4398
4578 4399 elif path:
4579 4400 # We bypass hg.peer() so we can proxy the sockets.
4580 4401 # TODO consider not doing this because we skip
4581 4402 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4582 4403 u = urlutil.url(path)
4583 4404 if u.scheme != b'http':
4584 4405 raise error.Abort(_(b'only http:// paths are currently supported'))
4585 4406
4586 4407 url, authinfo = u.authinfo()
4587 4408 openerargs = {
4588 4409 'useragent': b'Mercurial debugwireproto',
4589 4410 }
4590 4411
4591 4412 # Turn pipes/sockets into observers so we can log I/O.
4592 4413 if ui.verbose:
4593 4414 openerargs.update(
4594 4415 {
4595 4416 'loggingfh': ui,
4596 4417 'loggingname': b's',
4597 4418 'loggingopts': {
4598 4419 'logdata': True,
4599 4420 'logdataapis': False,
4600 4421 },
4601 4422 }
4602 4423 )
4603 4424
4604 4425 if ui.debugflag:
4605 4426 openerargs['loggingopts']['logdataapis'] = True
4606 4427
4607 4428 # Don't send default headers when in raw mode. This allows us to
4608 4429 # bypass most of the behavior of our URL handling code so we can
4609 4430 # have near complete control over what's sent on the wire.
4610 4431 if opts['peer'] == b'raw':
4611 4432 openerargs['sendaccept'] = False
4612 4433
4613 4434 opener = urlmod.opener(ui, authinfo, **openerargs)
4614 4435
4615 4436 if opts['peer'] == b'raw':
4616 4437 ui.write(_(b'using raw connection to peer\n'))
4617 4438 peer = None
4618 4439 elif opts['peer']:
4619 4440 raise error.Abort(
4620 4441 _(b'--peer %s not supported with HTTP peers') % opts['peer']
4621 4442 )
4622 4443 else:
4623 4444 peer_path = urlutil.try_path(ui, path)
4624 4445 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4625 4446
4626 4447 # We /could/ populate stdin/stdout with sock.makefile()...
4627 4448 else:
4628 4449 raise error.Abort(_(b'unsupported connection configuration'))
4629 4450
4630 4451 batchedcommands = None
4631 4452
4632 4453 # Now perform actions based on the parsed wire language instructions.
4633 4454 for action, lines in blocks:
4634 4455 if action in (b'raw', b'raw+'):
4635 4456 if not stdin:
4636 4457 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4637 4458
4638 4459 # Concatenate the data together.
4639 4460 data = b''.join(l.lstrip() for l in lines)
4640 4461 data = stringutil.unescapestr(data)
4641 4462 stdin.write(data)
4642 4463
4643 4464 if action == b'raw+':
4644 4465 stdin.flush()
4645 4466 elif action == b'flush':
4646 4467 if not stdin:
4647 4468 raise error.Abort(_(b'cannot call flush on this peer'))
4648 4469 stdin.flush()
4649 4470 elif action.startswith(b'command'):
4650 4471 if not peer:
4651 4472 raise error.Abort(
4652 4473 _(
4653 4474 b'cannot send commands unless peer instance '
4654 4475 b'is available'
4655 4476 )
4656 4477 )
4657 4478
4658 4479 command = action.split(b' ', 1)[1]
4659 4480
4660 4481 args = {}
4661 4482 for line in lines:
4662 4483 # We need to allow empty values.
4663 4484 fields = line.lstrip().split(b' ', 1)
4664 4485 if len(fields) == 1:
4665 4486 key = fields[0]
4666 4487 value = b''
4667 4488 else:
4668 4489 key, value = fields
4669 4490
4670 4491 if value.startswith(b'eval:'):
4671 4492 value = stringutil.evalpythonliteral(value[5:])
4672 4493 else:
4673 4494 value = stringutil.unescapestr(value)
4674 4495
4675 4496 args[key] = value
4676 4497
4677 4498 if batchedcommands is not None:
4678 4499 batchedcommands.append((command, args))
4679 4500 continue
4680 4501
4681 4502 ui.status(_(b'sending %s command\n') % command)
4682 4503
4683 4504 if b'PUSHFILE' in args:
4684 4505 with open(args[b'PUSHFILE'], 'rb') as fh:
4685 4506 del args[b'PUSHFILE']
4686 4507 res, output = peer._callpush(
4687 4508 command, fh, **pycompat.strkwargs(args)
4688 4509 )
4689 4510 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4690 4511 ui.status(
4691 4512 _(b'remote output: %s\n') % stringutil.escapestr(output)
4692 4513 )
4693 4514 else:
4694 4515 with peer.commandexecutor() as e:
4695 4516 res = e.callcommand(command, args).result()
4696 4517
4697 4518 ui.status(
4698 4519 _(b'response: %s\n')
4699 4520 % stringutil.pprint(res, bprefix=True, indent=2)
4700 4521 )
4701 4522
4702 4523 elif action == b'batchbegin':
4703 4524 if batchedcommands is not None:
4704 4525 raise error.Abort(_(b'nested batchbegin not allowed'))
4705 4526
4706 4527 batchedcommands = []
4707 4528 elif action == b'batchsubmit':
4708 4529 # There is a batching API we could go through. But it would be
4709 4530 # difficult to normalize requests into function calls. It is easier
4710 4531 # to bypass this layer and normalize to commands + args.
4711 4532 ui.status(
4712 4533 _(b'sending batch with %d sub-commands\n')
4713 4534 % len(batchedcommands)
4714 4535 )
4715 4536 assert peer is not None
4716 4537 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4717 4538 ui.status(
4718 4539 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4719 4540 )
4720 4541
4721 4542 batchedcommands = None
4722 4543
4723 4544 elif action.startswith(b'httprequest '):
4724 4545 if not opener:
4725 4546 raise error.Abort(
4726 4547 _(b'cannot use httprequest without an HTTP peer')
4727 4548 )
4728 4549
4729 4550 request = action.split(b' ', 2)
4730 4551 if len(request) != 3:
4731 4552 raise error.Abort(
4732 4553 _(
4733 4554 b'invalid httprequest: expected format is '
4734 4555 b'"httprequest <method> <path>'
4735 4556 )
4736 4557 )
4737 4558
4738 4559 method, httppath = request[1:]
4739 4560 headers = {}
4740 4561 body = None
4741 4562 frames = []
4742 4563 for line in lines:
4743 4564 line = line.lstrip()
4744 4565 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4745 4566 if m:
4746 4567 # Headers need to use native strings.
4747 4568 key = pycompat.strurl(m.group(1))
4748 4569 value = pycompat.strurl(m.group(2))
4749 4570 headers[key] = value
4750 4571 continue
4751 4572
4752 4573 if line.startswith(b'BODYFILE '):
4753 4574 with open(line.split(b' ', 1), b'rb') as fh:
4754 4575 body = fh.read()
4755 4576 elif line.startswith(b'frame '):
4756 4577 frame = wireprotoframing.makeframefromhumanstring(
4757 4578 line[len(b'frame ') :]
4758 4579 )
4759 4580
4760 4581 frames.append(frame)
4761 4582 else:
4762 4583 raise error.Abort(
4763 4584 _(b'unknown argument to httprequest: %s') % line
4764 4585 )
4765 4586
4766 4587 url = path + httppath
4767 4588
4768 4589 if frames:
4769 4590 body = b''.join(bytes(f) for f in frames)
4770 4591
4771 4592 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4772 4593
4773 4594 # urllib.Request insists on using has_data() as a proxy for
4774 4595 # determining the request method. Override that to use our
4775 4596 # explicitly requested method.
4776 4597 req.get_method = lambda: pycompat.sysstr(method)
4777 4598
4778 4599 try:
4779 4600 res = opener.open(req)
4780 4601 body = res.read()
4781 4602 except util.urlerr.urlerror as e:
4782 4603 # read() method must be called, but only exists in Python 2
4783 4604 getattr(e, 'read', lambda: None)()
4784 4605 continue
4785 4606
4786 4607 ct = res.headers.get('Content-Type')
4787 4608 if ct == 'application/mercurial-cbor':
4788 4609 ui.write(
4789 4610 _(b'cbor> %s\n')
4790 4611 % stringutil.pprint(
4791 4612 cborutil.decodeall(body), bprefix=True, indent=2
4792 4613 )
4793 4614 )
4794 4615
4795 4616 elif action == b'close':
4796 4617 assert peer is not None
4797 4618 peer.close()
4798 4619 elif action == b'readavailable':
4799 4620 if not stdout or not stderr:
4800 4621 raise error.Abort(
4801 4622 _(b'readavailable not available on this peer')
4802 4623 )
4803 4624
4804 4625 stdin.close()
4805 4626 stdout.read()
4806 4627 stderr.read()
4807 4628
4808 4629 elif action == b'readline':
4809 4630 if not stdout:
4810 4631 raise error.Abort(_(b'readline not available on this peer'))
4811 4632 stdout.readline()
4812 4633 elif action == b'ereadline':
4813 4634 if not stderr:
4814 4635 raise error.Abort(_(b'ereadline not available on this peer'))
4815 4636 stderr.readline()
4816 4637 elif action.startswith(b'read '):
4817 4638 count = int(action.split(b' ', 1)[1])
4818 4639 if not stdout:
4819 4640 raise error.Abort(_(b'read not available on this peer'))
4820 4641 stdout.read(count)
4821 4642 elif action.startswith(b'eread '):
4822 4643 count = int(action.split(b' ', 1)[1])
4823 4644 if not stderr:
4824 4645 raise error.Abort(_(b'eread not available on this peer'))
4825 4646 stderr.read(count)
4826 4647 else:
4827 4648 raise error.Abort(_(b'unknown action: %s') % action)
4828 4649
4829 4650 if batchedcommands is not None:
4830 4651 raise error.Abort(_(b'unclosed "batchbegin" request'))
4831 4652
4832 4653 if peer:
4833 4654 peer.close()
4834 4655
4835 4656 if proc:
4836 4657 proc.kill()
@@ -1,712 +1,884 b''
1 1 # revlogutils/debug.py - utility used for revlog debuging
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 # Copyright 2022 Octobus <contact@octobus.net>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import collections
10 10 import string
11 11
12 12 from .. import (
13 13 mdiff,
14 14 node as nodemod,
15 15 revlogutils,
16 16 )
17 17
18 18 from . import (
19 19 constants,
20 20 deltas as deltautil,
21 21 )
22 22
23 23 INDEX_ENTRY_DEBUG_COLUMN = []
24 24
25 25 NODE_SIZE = object()
26 26
27 27
28 28 class _column_base:
29 29 """constains the definition of a revlog column
30 30
31 31 name: the column header,
32 32 value_func: the function called to get a value,
33 33 size: the width of the column,
34 34 verbose_only: only include the column in verbose mode.
35 35 """
36 36
37 37 def __init__(self, name, value_func, size=None, verbose=False):
38 38 self.name = name
39 39 self.value_func = value_func
40 40 if size is not NODE_SIZE:
41 41 if size is None:
42 42 size = 8 # arbitrary default
43 43 size = max(len(name), size)
44 44 self._size = size
45 45 self.verbose_only = verbose
46 46
47 47 def get_size(self, node_size):
48 48 if self._size is NODE_SIZE:
49 49 return node_size
50 50 else:
51 51 return self._size
52 52
53 53
54 54 def debug_column(name, size=None, verbose=False):
55 55 """decorated function is registered as a column
56 56
57 57 name: the name of the column,
58 58 size: the expected size of the column.
59 59 """
60 60
61 61 def register(func):
62 62 entry = _column_base(
63 63 name=name,
64 64 value_func=func,
65 65 size=size,
66 66 verbose=verbose,
67 67 )
68 68 INDEX_ENTRY_DEBUG_COLUMN.append(entry)
69 69 return entry
70 70
71 71 return register
72 72
73 73
74 74 @debug_column(b"rev", size=6)
75 75 def _rev(index, rev, entry, hexfn):
76 76 return b"%d" % rev
77 77
78 78
79 79 @debug_column(b"rank", size=6, verbose=True)
80 80 def rank(index, rev, entry, hexfn):
81 81 return b"%d" % entry[constants.ENTRY_RANK]
82 82
83 83
84 84 @debug_column(b"linkrev", size=6)
85 85 def _linkrev(index, rev, entry, hexfn):
86 86 return b"%d" % entry[constants.ENTRY_LINK_REV]
87 87
88 88
89 89 @debug_column(b"nodeid", size=NODE_SIZE)
90 90 def _nodeid(index, rev, entry, hexfn):
91 91 return hexfn(entry[constants.ENTRY_NODE_ID])
92 92
93 93
94 94 @debug_column(b"p1-rev", size=6, verbose=True)
95 95 def _p1_rev(index, rev, entry, hexfn):
96 96 return b"%d" % entry[constants.ENTRY_PARENT_1]
97 97
98 98
99 99 @debug_column(b"p1-nodeid", size=NODE_SIZE)
100 100 def _p1_node(index, rev, entry, hexfn):
101 101 parent = entry[constants.ENTRY_PARENT_1]
102 102 p_entry = index[parent]
103 103 return hexfn(p_entry[constants.ENTRY_NODE_ID])
104 104
105 105
106 106 @debug_column(b"p2-rev", size=6, verbose=True)
107 107 def _p2_rev(index, rev, entry, hexfn):
108 108 return b"%d" % entry[constants.ENTRY_PARENT_2]
109 109
110 110
111 111 @debug_column(b"p2-nodeid", size=NODE_SIZE)
112 112 def _p2_node(index, rev, entry, hexfn):
113 113 parent = entry[constants.ENTRY_PARENT_2]
114 114 p_entry = index[parent]
115 115 return hexfn(p_entry[constants.ENTRY_NODE_ID])
116 116
117 117
118 118 @debug_column(b"full-size", size=20, verbose=True)
119 119 def full_size(index, rev, entry, hexfn):
120 120 return b"%d" % entry[constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
121 121
122 122
123 123 @debug_column(b"delta-base", size=6, verbose=True)
124 124 def delta_base(index, rev, entry, hexfn):
125 125 return b"%d" % entry[constants.ENTRY_DELTA_BASE]
126 126
127 127
128 128 @debug_column(b"flags", size=2, verbose=True)
129 129 def flags(index, rev, entry, hexfn):
130 130 field = entry[constants.ENTRY_DATA_OFFSET]
131 131 field &= 0xFFFF
132 132 return b"%d" % field
133 133
134 134
135 135 @debug_column(b"comp-mode", size=4, verbose=True)
136 136 def compression_mode(index, rev, entry, hexfn):
137 137 return b"%d" % entry[constants.ENTRY_DATA_COMPRESSION_MODE]
138 138
139 139
140 140 @debug_column(b"data-offset", size=20, verbose=True)
141 141 def data_offset(index, rev, entry, hexfn):
142 142 field = entry[constants.ENTRY_DATA_OFFSET]
143 143 field >>= 16
144 144 return b"%d" % field
145 145
146 146
147 147 @debug_column(b"chunk-size", size=10, verbose=True)
148 148 def data_chunk_size(index, rev, entry, hexfn):
149 149 return b"%d" % entry[constants.ENTRY_DATA_COMPRESSED_LENGTH]
150 150
151 151
152 152 @debug_column(b"sd-comp-mode", size=7, verbose=True)
153 153 def sidedata_compression_mode(index, rev, entry, hexfn):
154 154 compression = entry[constants.ENTRY_SIDEDATA_COMPRESSION_MODE]
155 155 if compression == constants.COMP_MODE_PLAIN:
156 156 return b"plain"
157 157 elif compression == constants.COMP_MODE_DEFAULT:
158 158 return b"default"
159 159 elif compression == constants.COMP_MODE_INLINE:
160 160 return b"inline"
161 161 else:
162 162 return b"%d" % compression
163 163
164 164
165 165 @debug_column(b"sidedata-offset", size=20, verbose=True)
166 166 def sidedata_offset(index, rev, entry, hexfn):
167 167 return b"%d" % entry[constants.ENTRY_SIDEDATA_OFFSET]
168 168
169 169
170 170 @debug_column(b"sd-chunk-size", size=10, verbose=True)
171 171 def sidedata_chunk_size(index, rev, entry, hexfn):
172 172 return b"%d" % entry[constants.ENTRY_SIDEDATA_COMPRESSED_LENGTH]
173 173
174 174
175 175 def debug_index(
176 176 ui,
177 177 repo,
178 178 formatter,
179 179 revlog,
180 180 full_node,
181 181 ):
182 182 """display index data for a revlog"""
183 183 if full_node:
184 184 hexfn = nodemod.hex
185 185 else:
186 186 hexfn = nodemod.short
187 187
188 188 idlen = 12
189 189 for i in revlog:
190 190 idlen = len(hexfn(revlog.node(i)))
191 191 break
192 192
193 193 fm = formatter
194 194
195 195 header_pieces = []
196 196 for column in INDEX_ENTRY_DEBUG_COLUMN:
197 197 if column.verbose_only and not ui.verbose:
198 198 continue
199 199 size = column.get_size(idlen)
200 200 name = column.name
201 201 header_pieces.append(name.rjust(size))
202 202
203 203 fm.plain(b' '.join(header_pieces) + b'\n')
204 204
205 205 index = revlog.index
206 206
207 207 for rev in revlog:
208 208 fm.startitem()
209 209 entry = index[rev]
210 210 first = True
211 211 for column in INDEX_ENTRY_DEBUG_COLUMN:
212 212 if column.verbose_only and not ui.verbose:
213 213 continue
214 214 if not first:
215 215 fm.plain(b' ')
216 216 first = False
217 217
218 218 size = column.get_size(idlen)
219 219 value = column.value_func(index, rev, entry, hexfn)
220 220 display = b"%%%ds" % size
221 221 fm.write(column.name, display, value)
222 222 fm.plain(b'\n')
223 223
224 224 fm.end()
225 225
226 226
227 227 def dump(ui, revlog):
228 228 """perform the work for `hg debugrevlog --dump"""
229 229 # XXX seems redundant with debug index ?
230 230 r = revlog
231 231 numrevs = len(r)
232 232 ui.write(
233 233 (
234 234 b"# rev p1rev p2rev start end deltastart base p1 p2"
235 235 b" rawsize totalsize compression heads chainlen\n"
236 236 )
237 237 )
238 238 ts = 0
239 239 heads = set()
240 240
241 241 for rev in range(numrevs):
242 242 dbase = r.deltaparent(rev)
243 243 if dbase == -1:
244 244 dbase = rev
245 245 cbase = r.chainbase(rev)
246 246 clen = r.chainlen(rev)
247 247 p1, p2 = r.parentrevs(rev)
248 248 rs = r.rawsize(rev)
249 249 ts = ts + rs
250 250 heads -= set(r.parentrevs(rev))
251 251 heads.add(rev)
252 252 try:
253 253 compression = ts / r.end(rev)
254 254 except ZeroDivisionError:
255 255 compression = 0
256 256 ui.write(
257 257 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
258 258 b"%11d %5d %8d\n"
259 259 % (
260 260 rev,
261 261 p1,
262 262 p2,
263 263 r.start(rev),
264 264 r.end(rev),
265 265 r.start(dbase),
266 266 r.start(cbase),
267 267 r.start(p1),
268 268 r.start(p2),
269 269 rs,
270 270 ts,
271 271 compression,
272 272 len(heads),
273 273 clen,
274 274 )
275 275 )
276 276
277 277
278 278 def debug_revlog(ui, revlog):
279 279 """code for `hg debugrevlog`"""
280 280 r = revlog
281 281 format = r._format_version
282 282 v = r._format_flags
283 283 flags = []
284 284 gdelta = False
285 285 if v & constants.FLAG_INLINE_DATA:
286 286 flags.append(b'inline')
287 287 if v & constants.FLAG_GENERALDELTA:
288 288 gdelta = True
289 289 flags.append(b'generaldelta')
290 290 if not flags:
291 291 flags = [b'(none)']
292 292
293 293 ### the total size of stored content if incompressed.
294 294 full_text_total_size = 0
295 295 ### tracks merge vs single parent
296 296 nummerges = 0
297 297
298 298 ### tracks ways the "delta" are build
299 299 # nodelta
300 300 numempty = 0
301 301 numemptytext = 0
302 302 numemptydelta = 0
303 303 # full file content
304 304 numfull = 0
305 305 # intermediate snapshot against a prior snapshot
306 306 numsemi = 0
307 307 # snapshot count per depth
308 308 numsnapdepth = collections.defaultdict(lambda: 0)
309 309 # number of snapshots with a non-ancestor delta
310 310 numsnapdepth_nad = collections.defaultdict(lambda: 0)
311 311 # delta against previous revision
312 312 numprev = 0
313 313 # delta against prev, where prev is a non-ancestor
314 314 numprev_nad = 0
315 315 # delta against first or second parent (not prev)
316 316 nump1 = 0
317 317 nump2 = 0
318 318 # delta against neither prev nor parents
319 319 numother = 0
320 320 # delta against other that is a non-ancestor
321 321 numother_nad = 0
322 322 # delta against prev that are also first or second parent
323 323 # (details of `numprev`)
324 324 nump1prev = 0
325 325 nump2prev = 0
326 326
327 327 # data about delta chain of each revs
328 328 chainlengths = []
329 329 chainbases = []
330 330 chainspans = []
331 331
332 332 # data about each revision
333 333 datasize = [None, 0, 0]
334 334 fullsize = [None, 0, 0]
335 335 semisize = [None, 0, 0]
336 336 # snapshot count per depth
337 337 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
338 338 deltasize = [None, 0, 0]
339 339 chunktypecounts = {}
340 340 chunktypesizes = {}
341 341
342 342 def addsize(size, l):
343 343 if l[0] is None or size < l[0]:
344 344 l[0] = size
345 345 if size > l[1]:
346 346 l[1] = size
347 347 l[2] += size
348 348
349 349 with r.reading():
350 350 numrevs = len(r)
351 351 for rev in range(numrevs):
352 352 p1, p2 = r.parentrevs(rev)
353 353 delta = r.deltaparent(rev)
354 354 if format > 0:
355 355 s = r.rawsize(rev)
356 356 full_text_total_size += s
357 357 addsize(s, datasize)
358 358 if p2 != nodemod.nullrev:
359 359 nummerges += 1
360 360 size = r.length(rev)
361 361 if delta == nodemod.nullrev:
362 362 chainlengths.append(0)
363 363 chainbases.append(r.start(rev))
364 364 chainspans.append(size)
365 365 if size == 0:
366 366 numempty += 1
367 367 numemptytext += 1
368 368 else:
369 369 numfull += 1
370 370 numsnapdepth[0] += 1
371 371 addsize(size, fullsize)
372 372 addsize(size, snapsizedepth[0])
373 373 else:
374 374 nad = (
375 375 delta != p1
376 376 and delta != p2
377 377 and not r.isancestorrev(delta, rev)
378 378 )
379 379 chainlengths.append(chainlengths[delta] + 1)
380 380 baseaddr = chainbases[delta]
381 381 revaddr = r.start(rev)
382 382 chainbases.append(baseaddr)
383 383 chainspans.append((revaddr - baseaddr) + size)
384 384 if size == 0:
385 385 numempty += 1
386 386 numemptydelta += 1
387 387 elif r.issnapshot(rev):
388 388 addsize(size, semisize)
389 389 numsemi += 1
390 390 depth = r.snapshotdepth(rev)
391 391 numsnapdepth[depth] += 1
392 392 if nad:
393 393 numsnapdepth_nad[depth] += 1
394 394 addsize(size, snapsizedepth[depth])
395 395 else:
396 396 addsize(size, deltasize)
397 397 if delta == rev - 1:
398 398 numprev += 1
399 399 if delta == p1:
400 400 nump1prev += 1
401 401 elif delta == p2:
402 402 nump2prev += 1
403 403 elif nad:
404 404 numprev_nad += 1
405 405 elif delta == p1:
406 406 nump1 += 1
407 407 elif delta == p2:
408 408 nump2 += 1
409 409 elif delta != nodemod.nullrev:
410 410 numother += 1
411 411 numother_nad += 1
412 412
413 413 # Obtain data on the raw chunks in the revlog.
414 414 if hasattr(r, '_getsegmentforrevs'):
415 415 segment = r._getsegmentforrevs(rev, rev)[1]
416 416 else:
417 417 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
418 418 if segment:
419 419 chunktype = bytes(segment[0:1])
420 420 else:
421 421 chunktype = b'empty'
422 422
423 423 if chunktype not in chunktypecounts:
424 424 chunktypecounts[chunktype] = 0
425 425 chunktypesizes[chunktype] = 0
426 426
427 427 chunktypecounts[chunktype] += 1
428 428 chunktypesizes[chunktype] += size
429 429
430 430 # Adjust size min value for empty cases
431 431 for size in (datasize, fullsize, semisize, deltasize):
432 432 if size[0] is None:
433 433 size[0] = 0
434 434
435 435 numdeltas = numrevs - numfull - numempty - numsemi
436 436 numoprev = numprev - nump1prev - nump2prev - numprev_nad
437 437 num_other_ancestors = numother - numother_nad
438 438 totalrawsize = datasize[2]
439 439 datasize[2] /= numrevs
440 440 fulltotal = fullsize[2]
441 441 if numfull == 0:
442 442 fullsize[2] = 0
443 443 else:
444 444 fullsize[2] /= numfull
445 445 semitotal = semisize[2]
446 446 snaptotal = {}
447 447 if numsemi > 0:
448 448 semisize[2] /= numsemi
449 449 for depth in snapsizedepth:
450 450 snaptotal[depth] = snapsizedepth[depth][2]
451 451 snapsizedepth[depth][2] /= numsnapdepth[depth]
452 452
453 453 deltatotal = deltasize[2]
454 454 if numdeltas > 0:
455 455 deltasize[2] /= numdeltas
456 456 totalsize = fulltotal + semitotal + deltatotal
457 457 avgchainlen = sum(chainlengths) / numrevs
458 458 maxchainlen = max(chainlengths)
459 459 maxchainspan = max(chainspans)
460 460 compratio = 1
461 461 if totalsize:
462 462 compratio = totalrawsize / totalsize
463 463
464 464 basedfmtstr = b'%%%dd\n'
465 465 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
466 466
467 467 def dfmtstr(max):
468 468 return basedfmtstr % len(str(max))
469 469
470 470 def pcfmtstr(max, padding=0):
471 471 return basepcfmtstr % (len(str(max)), b' ' * padding)
472 472
473 473 def pcfmt(value, total):
474 474 if total:
475 475 return (value, 100 * float(value) / total)
476 476 else:
477 477 return value, 100.0
478 478
479 479 ui.writenoi18n(b'format : %d\n' % format)
480 480 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
481 481
482 482 ui.write(b'\n')
483 483 fmt = pcfmtstr(totalsize)
484 484 fmt2 = dfmtstr(totalsize)
485 485 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
486 486 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
487 487 ui.writenoi18n(
488 488 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
489 489 )
490 490 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
491 491 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
492 492 ui.writenoi18n(
493 493 b' text : '
494 494 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
495 495 )
496 496 ui.writenoi18n(
497 497 b' delta : '
498 498 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
499 499 )
500 500 ui.writenoi18n(
501 501 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
502 502 )
503 503 for depth in sorted(numsnapdepth):
504 504 base = b' lvl-%-3d : ' % depth
505 505 count = fmt % pcfmt(numsnapdepth[depth], numrevs)
506 506 pieces = [base, count]
507 507 if numsnapdepth_nad[depth]:
508 508 pieces[-1] = count = count[:-1] # drop the final '\n'
509 509 more = b' non-ancestor-bases: '
510 510 anc_count = fmt
511 511 anc_count %= pcfmt(numsnapdepth_nad[depth], numsnapdepth[depth])
512 512 pieces.append(more)
513 513 pieces.append(anc_count)
514 514 ui.write(b''.join(pieces))
515 515 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
516 516 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
517 517 ui.writenoi18n(
518 518 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
519 519 )
520 520 for depth in sorted(numsnapdepth):
521 521 ui.write(
522 522 (b' lvl-%-3d : ' % depth)
523 523 + fmt % pcfmt(snaptotal[depth], totalsize)
524 524 )
525 525 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
526 526
527 527 letters = string.ascii_letters.encode('ascii')
528 528
529 529 def fmtchunktype(chunktype):
530 530 if chunktype == b'empty':
531 531 return b' %s : ' % chunktype
532 532 elif chunktype in letters:
533 533 return b' 0x%s (%s) : ' % (nodemod.hex(chunktype), chunktype)
534 534 else:
535 535 return b' 0x%s : ' % nodemod.hex(chunktype)
536 536
537 537 ui.write(b'\n')
538 538 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
539 539 for chunktype in sorted(chunktypecounts):
540 540 ui.write(fmtchunktype(chunktype))
541 541 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
542 542 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
543 543 for chunktype in sorted(chunktypecounts):
544 544 ui.write(fmtchunktype(chunktype))
545 545 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
546 546
547 547 ui.write(b'\n')
548 548 b_total = b"%d" % full_text_total_size
549 549 p_total = []
550 550 while len(b_total) > 3:
551 551 p_total.append(b_total[-3:])
552 552 b_total = b_total[:-3]
553 553 p_total.append(b_total)
554 554 p_total.reverse()
555 555 b_total = b' '.join(p_total)
556 556
557 557 ui.write(b'\n')
558 558 ui.writenoi18n(b'total-stored-content: %s bytes\n' % b_total)
559 559 ui.write(b'\n')
560 560 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
561 561 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
562 562 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
563 563 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
564 564 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
565 565
566 566 if format > 0:
567 567 ui.write(b'\n')
568 568 ui.writenoi18n(
569 569 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
570 570 % tuple(datasize)
571 571 )
572 572 ui.writenoi18n(
573 573 b'full revision size (min/max/avg) : %d / %d / %d\n'
574 574 % tuple(fullsize)
575 575 )
576 576 ui.writenoi18n(
577 577 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
578 578 % tuple(semisize)
579 579 )
580 580 for depth in sorted(snapsizedepth):
581 581 if depth == 0:
582 582 continue
583 583 ui.writenoi18n(
584 584 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
585 585 % ((depth,) + tuple(snapsizedepth[depth]))
586 586 )
587 587 ui.writenoi18n(
588 588 b'delta size (min/max/avg) : %d / %d / %d\n'
589 589 % tuple(deltasize)
590 590 )
591 591
592 592 if numdeltas > 0:
593 593 ui.write(b'\n')
594 594 fmt = pcfmtstr(numdeltas)
595 595 fmt2 = pcfmtstr(numdeltas, 4)
596 596 ui.writenoi18n(
597 597 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
598 598 )
599 599 if numprev > 0:
600 600 ui.writenoi18n(
601 601 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
602 602 )
603 603 ui.writenoi18n(
604 604 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
605 605 )
606 606 ui.writenoi18n(
607 607 b' other-ancestor : ' + fmt2 % pcfmt(numoprev, numprev)
608 608 )
609 609 ui.writenoi18n(
610 610 b' unrelated : ' + fmt2 % pcfmt(numoprev, numprev)
611 611 )
612 612 if gdelta:
613 613 ui.writenoi18n(
614 614 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
615 615 )
616 616 ui.writenoi18n(
617 617 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
618 618 )
619 619 ui.writenoi18n(
620 620 b'deltas against ancs : '
621 621 + fmt % pcfmt(num_other_ancestors, numdeltas)
622 622 )
623 623 ui.writenoi18n(
624 624 b'deltas against other : '
625 625 + fmt % pcfmt(numother_nad, numdeltas)
626 626 )
627 627
628 628
629 629 def debug_delta_find(ui, revlog, rev, base_rev=nodemod.nullrev):
630 630 """display the search process for a delta"""
631 631 deltacomputer = deltautil.deltacomputer(
632 632 revlog,
633 633 write_debug=ui.write,
634 634 debug_search=not ui.quiet,
635 635 )
636 636
637 637 node = revlog.node(rev)
638 638 p1r, p2r = revlog.parentrevs(rev)
639 639 p1 = revlog.node(p1r)
640 640 p2 = revlog.node(p2r)
641 641 full_text = revlog.revision(rev)
642 642 btext = [full_text]
643 643 textlen = len(btext[0])
644 644 cachedelta = None
645 645 flags = revlog.flags(rev)
646 646
647 647 if base_rev != nodemod.nullrev:
648 648 base_text = revlog.revision(base_rev)
649 649 delta = mdiff.textdiff(base_text, full_text)
650 650
651 651 cachedelta = (base_rev, delta, constants.DELTA_BASE_REUSE_TRY)
652 652 btext = [None]
653 653
654 654 revinfo = revlogutils.revisioninfo(
655 655 node,
656 656 p1,
657 657 p2,
658 658 btext,
659 659 textlen,
660 660 cachedelta,
661 661 flags,
662 662 )
663 663
664 664 fh = revlog._datafp()
665 665 deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
666 666
667 667
668 668 def debug_revlog_stats(
669 669 repo, fm, changelog: bool, manifest: bool, filelogs: bool
670 670 ):
671 671 """Format revlog statistics for debugging purposes
672 672
673 673 fm: the output formatter.
674 674 """
675 675 fm.plain(b'rev-count data-size inl type target \n')
676 676
677 677 revlog_entries = [e for e in repo.store.walk() if e.is_revlog]
678 678 revlog_entries.sort(key=lambda e: (e.revlog_type, e.target_id))
679 679
680 680 for entry in revlog_entries:
681 681 if not changelog and entry.is_changelog:
682 682 continue
683 683 elif not manifest and entry.is_manifestlog:
684 684 continue
685 685 elif not filelogs and entry.is_filelog:
686 686 continue
687 687 rlog = entry.get_revlog_instance(repo).get_revlog()
688 688 fm.startitem()
689 689 nb_rev = len(rlog)
690 690 inline = rlog._inline
691 691 data_size = rlog._get_data_offset(nb_rev - 1)
692 692
693 693 target = rlog.target
694 694 revlog_type = b'unknown'
695 695 revlog_target = b''
696 696 if target[0] == constants.KIND_CHANGELOG:
697 697 revlog_type = b'changelog'
698 698 elif target[0] == constants.KIND_MANIFESTLOG:
699 699 revlog_type = b'manifest'
700 700 revlog_target = target[1]
701 701 elif target[0] == constants.KIND_FILELOG:
702 702 revlog_type = b'file'
703 703 revlog_target = target[1]
704 704
705 705 fm.write(b'revlog.rev-count', b'%9d', nb_rev)
706 706 fm.write(b'revlog.data-size', b'%12d', data_size)
707 707
708 708 fm.write(b'revlog.inline', b' %-3s', b'yes' if inline else b'no')
709 709 fm.write(b'revlog.type', b' %-9s', revlog_type)
710 710 fm.write(b'revlog.target', b' %s', revlog_target)
711 711
712 712 fm.plain(b'\n')
713
714
715 def debug_delta_chain(revlog):
716 r = revlog
717 index = r.index
718 start = r.start
719 length = r.length
720 generaldelta = r.delta_config.general_delta
721 withsparseread = r.data_config.with_sparse_read
722
723 # security to avoid crash on corrupted revlogs
724 total_revs = len(index)
725
726 chain_size_cache = {}
727
728 def revinfo(rev):
729 e = index[rev]
730 compsize = e[constants.ENTRY_DATA_COMPRESSED_LENGTH]
731 uncompsize = e[constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
732
733 base = e[constants.ENTRY_DELTA_BASE]
734 p1 = e[constants.ENTRY_PARENT_1]
735 p2 = e[constants.ENTRY_PARENT_2]
736
737 # If the parents of a revision has an empty delta, we never try to
738 # delta against that parent, but directly against the delta base of
739 # that parent (recursively). It avoids adding a useless entry in the
740 # chain.
741 #
742 # However we need to detect that as a special case for delta-type, that
743 # is not simply "other".
744 p1_base = p1
745 if p1 != nodemod.nullrev and p1 < total_revs:
746 e1 = index[p1]
747 while e1[constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
748 new_base = e1[constants.ENTRY_DELTA_BASE]
749 if (
750 new_base == p1_base
751 or new_base == nodemod.nullrev
752 or new_base >= total_revs
753 ):
754 break
755 p1_base = new_base
756 e1 = index[p1_base]
757 p2_base = p2
758 if p2 != nodemod.nullrev and p2 < total_revs:
759 e2 = index[p2]
760 while e2[constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
761 new_base = e2[constants.ENTRY_DELTA_BASE]
762 if (
763 new_base == p2_base
764 or new_base == nodemod.nullrev
765 or new_base >= total_revs
766 ):
767 break
768 p2_base = new_base
769 e2 = index[p2_base]
770
771 if generaldelta:
772 if base == p1:
773 deltatype = b'p1'
774 elif base == p2:
775 deltatype = b'p2'
776 elif base == rev:
777 deltatype = b'base'
778 elif base == p1_base:
779 deltatype = b'skip1'
780 elif base == p2_base:
781 deltatype = b'skip2'
782 elif r.issnapshot(rev):
783 deltatype = b'snap'
784 elif base == rev - 1:
785 deltatype = b'prev'
786 else:
787 deltatype = b'other'
788 else:
789 if base == rev:
790 deltatype = b'base'
791 else:
792 deltatype = b'prev'
793
794 chain = r._deltachain(rev)[0]
795 chain_size = 0
796 for iter_rev in reversed(chain):
797 cached = chain_size_cache.get(iter_rev)
798 if cached is not None:
799 chain_size += cached
800 break
801 e = index[iter_rev]
802 chain_size += e[constants.ENTRY_DATA_COMPRESSED_LENGTH]
803 chain_size_cache[rev] = chain_size
804
805 return p1, p2, compsize, uncompsize, deltatype, chain, chain_size
806
807 header = (
808 b' rev p1 p2 chain# chainlen prev delta '
809 b'size rawsize chainsize ratio lindist extradist '
810 b'extraratio'
811 )
812 if withsparseread:
813 header += b' readsize largestblk rddensity srchunks'
814 header += b'\n'
815 yield header
816
817 chainbases = {}
818 for rev in r:
819 p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
820 chainbase = chain[0]
821 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
822 basestart = start(chainbase)
823 revstart = start(rev)
824 lineardist = revstart + comp - basestart
825 extradist = lineardist - chainsize
826 try:
827 prevrev = chain[-2]
828 except IndexError:
829 prevrev = -1
830
831 if uncomp != 0:
832 chainratio = float(chainsize) / float(uncomp)
833 else:
834 chainratio = chainsize
835
836 if chainsize != 0:
837 extraratio = float(extradist) / float(chainsize)
838 else:
839 extraratio = extradist
840
841 # label, display-format, data-key, value
842 entry = [
843 (b'rev', b'%7d', 'rev', rev),
844 (b'p1', b'%7d', 'p1', p1),
845 (b'p2', b'%7d', 'p2', p2),
846 (b'chainid', b'%7d', 'chainid', chainid),
847 (b'chainlen', b'%8d', 'chainlen', len(chain)),
848 (b'prevrev', b'%8d', 'prevrev', prevrev),
849 (b'deltatype', b'%7s', 'deltatype', deltatype),
850 (b'compsize', b'%10d', 'compsize', comp),
851 (b'uncompsize', b'%10d', 'uncompsize', uncomp),
852 (b'chainsize', b'%10d', 'chainsize', chainsize),
853 (b'chainratio', b'%9.5f', 'chainratio', chainratio),
854 (b'lindist', b'%9d', 'lindist', lineardist),
855 (b'extradist', b'%9d', 'extradist', extradist),
856 (b'extraratio', b'%10.5f', 'extraratio', extraratio),
857 ]
858 if withsparseread:
859 readsize = 0
860 largestblock = 0
861 srchunks = 0
862
863 for revschunk in deltautil.slicechunk(r, chain):
864 srchunks += 1
865 blkend = start(revschunk[-1]) + length(revschunk[-1])
866 blksize = blkend - start(revschunk[0])
867
868 readsize += blksize
869 if largestblock < blksize:
870 largestblock = blksize
871
872 if readsize:
873 readdensity = float(chainsize) / float(readsize)
874 else:
875 readdensity = 1
876 entry.extend(
877 [
878 (b'readsize', b'%10d', 'readsize', readsize),
879 (b'largestblock', b'%10d', 'largestblock', largestblock),
880 (b'readdensity', b'%9.5f', 'readdensity', readdensity),
881 (b'srchunks', b'%8d', 'srchunks', srchunks),
882 ]
883 )
884 yield entry
General Comments 0
You need to be logged in to leave comments. Login now