##// END OF EJS Templates
debug-revlog: move the --dump code in `revlogutils` module...
marmoute -
r50554:7c0a3838 default
parent child Browse files
Show More
@@ -1,5091 +1,5047
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mdiff,
63 63 mergestate as mergestatemod,
64 64 metadata,
65 65 obsolete,
66 66 obsutil,
67 67 pathutil,
68 68 phases,
69 69 policy,
70 70 pvec,
71 71 pycompat,
72 72 registrar,
73 73 repair,
74 74 repoview,
75 75 requirements,
76 76 revlog,
77 77 revlogutils,
78 78 revset,
79 79 revsetlang,
80 80 scmutil,
81 81 setdiscovery,
82 82 simplemerge,
83 83 sshpeer,
84 84 sslutil,
85 85 streamclone,
86 86 strip,
87 87 tags as tagsmod,
88 88 templater,
89 89 treediscovery,
90 90 upgrade,
91 91 url as urlmod,
92 92 util,
93 93 vfs as vfsmod,
94 94 wireprotoframing,
95 95 wireprotoserver,
96 96 )
97 97 from .interfaces import repository
98 98 from .utils import (
99 99 cborutil,
100 100 compression,
101 101 dateutil,
102 102 procutil,
103 103 stringutil,
104 104 urlutil,
105 105 )
106 106
107 107 from .revlogutils import (
108 108 constants as revlog_constants,
109 109 debug as revlog_debug,
110 110 deltas as deltautil,
111 111 nodemap,
112 112 rewrite,
113 113 sidedata,
114 114 )
115 115
# Convenience alias so debug commands can release locks the same way the
# main command module does.
release = lockmod.release

# Command table for the debug* commands.  We start from strip's table so
# the command registered there is included, then register ours into it.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
121 121
122 122
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        tonode = store.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repository.
        if not repo:
            msg = _(b'there is no Mercurial repository here (.hg not found)')
            raise error.Abort(msg)
        rev1, rev2 = args
        store = repo.changelog
        tonode = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = store.ancestor(tonode(rev1), tonode(rev2))
    ui.write(b'%d:%s\n' % (store.rev(ancestor), hex(ancestor)))
142 142
143 143
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Write the (harmless) EICAR test signature into the cache directory;
    # a resident AV scanner is expected to react to it.
    # NOTE(review): the path is a str literal while Mercurial vfs paths are
    # normally bytes -- confirm cachevfs accepts str here.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
159 159
160 160
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle, let exchange sniff its type, then replay it into repo.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
167 167
168 168
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parsing pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second parsing pass: actually create the commits inside one
    # transaction, holding both the working-dir and store locks.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' event: create one new node (commit).
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge: three-way merge the "mf" file from both
                        # parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's own line so every rev modifies
                    # a distinct line of the shared file.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # On merges, carry over the "nf*" files from p2.
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: provide file content (or None for
                    # files we do not touch).
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' event: record a local tag for node `id`.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' event: switch the named branch for subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
352 352
353 353
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of a changegroup unbundler ``gen``.

    With ``all`` set, every delta of the changelog, manifest and filelog
    groups is printed with its parents, linkrev and delta base; otherwise
    only changelog node ids are listed.  ``indent`` prefixes every output
    line (used when nested inside bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print a section header, then one line per delta in the
            # current group.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelogheader() returns {} when the groups are exhausted.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
393 393
394 394
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Report the unknown format version instead of crashing.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        # Markers are sorted so the output is deterministic.
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
417 417
418 418
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'

    ``data`` is a binary phase-heads blob as found in a bundle2
    'phase-heads' part.  One line per head is written, annotated with its
    phase name.  (The previous docstring was copy-pasted from the
    obsmarkers helper and described the wrong content.)
    """
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
427 427
428 428
def _quasirepr(thing):
    """Return a stable bytes repr of ``thing``.

    Mappings are rendered with their keys sorted so the output does not
    depend on insertion order; everything else falls back to ``repr()``.
    """
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mapping_types):
        return pycompat.bytestr(repr(thing))
    pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(pairs)
435 435
436 436
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Honor --part-type: when given, only matching parts are dumped.
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # For well-known part types, decode and pretty-print the payload
        # (indented under the part header) unless --quiet was given.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
459 459
460 460
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        if spec:
            # --spec: only report the bundlespec, do not dump contents.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, fh))
            return

        gen = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
483 483
484 484
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # Plain wire-protocol capabilities first.
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b' %s\n' % cap)
        # Then the bundle2 capability tree, if advertised.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for capkey, capvalues in sorted(b2caps.items()):
                ui.write(b' %s\n' % capkey)
                for value in capvalues:
                    ui.write(b' %s\n' % value)
    finally:
        # Always release the peer connection, even on error.
        peer.close()
504 504
505 505
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # Recompute the file-change metadata from the changeset itself.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the precomputed metadata from changelog sidedata, if present.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classify how this file was affected by the revision.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # Record copy information (which parent it was copied from,
            # and the source path) when available.
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
555 555
556 556
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    error_count = 0
    # dirstate.verify() yields (format-string, arg, ...) tuples describing
    # each inconsistency it finds.
    for problem in repo.dirstate.verify(manifest1, manifest2):
        ui.warn(problem[0] % problem[1:])
        error_count += 1
    if error_count:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
570 570
571 571
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
584 584
585 585
def _debugdisplaycolor(ui):
    """Write every known color/effect name, each rendered in its own style."""
    # Work on a copy with a fresh style table so only the entries we build
    # below are used for rendering.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # In terminfo mode, also expose the configured color.* and
        # terminfo.* entries (with their prefix stripped as the label).
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
602 602
603 603
def _debugdisplaystyle(ui):
    """Write every configured style label and its effects, column-aligned."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad so the effect lists all start in the same column.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            padding = max(0, width - len(label))
            ui.write(b' ' * padding)
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
617 617
618 618
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # Named `bundle_reqs` (not `requirements`) to avoid shadowing the
    # module-level `requirements` import.
    bundle_reqs, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(bundle_reqs)))
640 640
641 641
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone revlog index: emit its DAG, labeling requested revs.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) node events and ('l', (rev,
            # label)) label events for dagtextlines().
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No file given: emit the changelog DAG of the local repository.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision to its tag names for labeling.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an 'a' (annotation) event on branch changes.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
711 711
712 712
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir, the storage is implied and the first positional
        # argument is actually the revision.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
728 728
729 729
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    # NOTE(review): norepo=True and optionalrepo=True together look
    # contradictory -- norepo means no repo object is passed, which makes
    # optionalrepo moot.  Confirm which flag is intended.
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date

    Prints the parsed (timestamp, tz-offset) pair and its standard string
    form.  If RANGE is given, also reports whether the date matches it.
    """
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
748 748
749 749
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``p1``:        parent 1 revision number (for reference)
    :``p2``:        parent 2 revision number (for reference)
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base:  a full snapshot
                    - snap:  an intermediate snapshot
                    - p1:    a delta against the first parent
                    - p2:    a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev:  a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:     in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision delta statistics; returns
        # (p1, p2, compsize, uncompsize, deltatype, chain, chainsize).
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # Classify the delta base (see the deltatype keyword above).
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta the base is either the rev itself (full
            # snapshot) or, implicitly, the previous revision.
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Number chains by their unique base revision, in order of appearance.
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain to measure how much disk
            # data would actually be touched.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
990 990
991 991
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with the full-text for the revision. This
    can be controlled with the --source flag.

    The revision argument uses the revision number of the target storage (not
    the changelog revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # one positional argument: a revision of the -c/-m storage;
    # two positional arguments: the first designates the file to open
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # bug fix: report errors under this command's own name instead of the
    # `debugdeltachain` name left over from a copy-paste.  `rlog` also avoids
    # shadowing the module-level `revlog` import.
    rlog = cmdutil.openrevlog(repo, b'debug-delta-find', file_, opts)

    deltacomputer = deltautil.deltacomputer(
        rlog,
        write_debug=ui.write,
        debug_search=not ui.quiet,
    )

    node = rlog.node(rev)
    p1r, p2r = rlog.parentrevs(rev)
    p1 = rlog.node(p1r)
    p2 = rlog.node(p2r)
    full_text = rlog.revision(rev)
    btext = [full_text]
    textlen = len(btext[0])
    cachedelta = None
    flags = rlog.flags(rev)

    if source != b'full':
        # seed the search with a pre-computed delta against the requested
        # base instead of the revision's full text
        if source == b'storage':
            base_rev = rlog.deltaparent(rev)
        elif source == b'p1':
            base_rev = p1r
        elif source == b'p2':
            base_rev = p2r
        elif source == b'prev':
            base_rev = rev - 1
        else:
            raise error.InputError(b"invalid --source value: %s" % source)

        if base_rev != nullrev:
            base_text = rlog.revision(base_rev)
            delta = mdiff.textdiff(base_text, full_text)

            cachedelta = (base_rev, delta)
            btext = [None]

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    fh = rlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1081 1081
1082 1082
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 docket (the metadata file) instead of
    # the entries; only dirstate-v2 repositories have a docket.
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --dates/--nodates control mtime rendering; the deprecated --nodates
    # flag takes precedence whenever it is explicitly set.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) so equal mtimes stay deterministic
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # mtime of -1 marks an entry with no recorded timestamp;
            # the literals are padded to align with the strftime output below
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set: render as 'lnk' instead of an octal mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1170 1170
1171 1171
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # the hash lives in the trailing bytes of the docket tree metadata
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1186 1186
1187 1187
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            # NOTE(review): "has having" looks like a typo for "as having";
            # left untouched here since help text is runtime-visible output.
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)))

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is meant
      for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # normal case: talk to a real (or locally resolved) peer

        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: use the local repository, view-filtered down to
        # the requested revisions, as the "remote" side
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same trick for the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` collects audit information from the discovery run
    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output (e.g. JSON): capture any textual output
        # produced during discovery and expose it as a data field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b"  round-trips-heads:   %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b"  round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b"  round-trips-between:   %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries:               %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b"  queries-branches:    %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b"  queries-between:     %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1469 1469
1470 1470
# read/write buffer size (4 KiB) used by `debugdownload` below
_chunksize = 4 << 10
1472 1472
1473 1473
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The downloaded bytes are written to `ui` unless --output names a
    destination file.
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # stream in fixed-size chunks to keep memory bounded
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # bug fix: the handle returned by urlmod.open was previously never
        # closed, leaking a file/connection on every invocation
        fh.close()
1496 1496
1497 1497
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # figure out where the extension was loaded from; an oxidized
        # (PyOxidizer) build has no __file__, so fall back to the executable
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with a compatibility marker in normal mode
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1559 1559
1560 1560
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset compilation pipeline; each stage transforms the tree
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, printing the tree after each requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1656 1656
1657 1657
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report only scans, so it cannot be combined with repair options
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # issue6528 only affects revlogv1 repositories
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the actual detection/repair logic lives in revlogutils.rewrite
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1730 1730
1731 1731
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: longest variant name, at least the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad each variant name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output renders booleans as yes/no; strings pass through
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo, config and default can be
        # highlighted by the color extension
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1802 1802
1803 1803
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean capability probe as the bytes the output expects
        return b'yes' if flag else b'no'

    unknown = b'(unknown)'
    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(b'mounted on: %s\n' % (util.getfsmountpoint(path) or unknown))
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or unknown))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # the case-sensitivity probe needs a scratch file in the target directory;
    # report '(unknown)' when one cannot be created there
    casesensitive = unknown
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1826 1826
1827 1827
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # translate the hex node ids into the keyword arguments expected by the
    # getbundle() wire-protocol call
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing compression name to an on-disk bundle type;
    # unrecognized names come back as None and are rejected below
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1874 1874
1875 1875
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # the file itself matches an ignore rule
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether a containing directory matches
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1924 1924
1925 1925
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # Some storage objects (presumably filelog/manifest wrappers) keep the
    # actual revlog in a `_revlog` attribute; fall back to the store itself
    # when it is absent. Use a local name distinct from the module-level
    # `revlog` import to avoid shadowing it.
    rl = getattr(store, b'_revlog', store)

    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=rl,
        full_node=ui.debugflag,
    )
1947 1947
1948 1948
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # Always emit the first-parent edge; only emit the second-parent
        # edge when it is a real (non-null) parent.
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1967 1967
1968 1968
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Exercise the index before collecting statistics.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1978 1978
1979 1979
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Runs a series of self-checks (encoding, Python runtime, compiled
    extensions, compression engines, templates, editor, username) and
    reports each result through a formatter.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # Count of detected problems; doubles as the command's return code.
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    # condwrite only emits when its first argument is truthy.
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # Locate the Python standard library; under an "oxidized" (PyOxidizer)
    # build there is no os.__file__, so fall back to the executable path.
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    # Same oxidized-build fallback as for the Python lib above.
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # Verify the native (C and/or Rust) extension modules can be
        # imported; any failure is recorded as a problem.
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is reused below as the "templates are OK" flag.
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # The editor setting may include arguments; only the executable part
    # is looked up in PATH.
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    # A missing default 'vi' is only a warning; a missing configured
    # editor is a problem.
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # Give loaded extensions a chance to run their own install checks.
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2278 2278
2279 2279
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # Render each result as "1" (known) or "0" (unknown).
    ui.write(b"%s\n" % b"".join(b"1" if f else b"0" for f in flags))
2293 2293
2294 2294
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Pure alias: the real implementation lives in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2299 2299
2300 2300
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-free mode: unconditionally remove the lock file(s) on disk.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # Set mode: acquire the requested lock(s) non-blockingly and hold them
    # until the user (or a signal) releases us.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # Always release whatever we managed to acquire.
        release(*locks)

    # Report mode (default): show who holds each lock and for how long.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could acquire it, so nobody else held it; release at once.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # Lock contents are "host:pid"; pretty-print the holder,
                    # omitting the host when it is the local machine.
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # Lock vanished between the failed acquire and the stat;
                # treat it as free.
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2423 2423
2424 2424
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache of the root manifest storage; abort when
        # the storage implementation does not expose one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    # --clear: drop both the in-memory and the persisted cache data.
    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    # --add NODE...: populate the cache by reading the given manifests.
    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # Default: display the cache contents and size statistics.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2498 2498
2499 2499
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # Report which on-disk merge state format (v1/v2) will be used.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable rendering of the structured output below.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged (local/other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; field layout depends on the record type.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras for files that are not themselves in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2607 2607
2608 2608
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.items():
        if name != b'branches':
            names.update(ns.listnames(repo))
    # Open branches only.
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)
    if not args:
        args = [b'']
    completions = set()
    for prefix in args:
        for candidate in names:
            if candidate.startswith(prefix):
                completions.add(candidate)
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2631 2631
2632 2632
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # Every mode operates on the unfiltered changelog, so hoist the common
    # setup instead of repeating it in each branch.
    cl = repo.unfiltered().changelog
    if opts['dump_new']:
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2694 2694
2695 2695
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Convert a full-length hex node id to binary, raising InputError
        # for anything else.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove the markers at the given store indices.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a new marker obsoleting `precursor` with
        # the given successors, inside a lock + transaction.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2845 2845
2846 2846
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # default=None: with no --rev this resolves to the working directory
    # context rather than tip.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2859 2859
2860 2860
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    # default=None: with no --rev this resolves to the working directory
    # context rather than tip.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2873 2873
2874 2874
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Returns (files, dirs): completions for ``path`` restricted to
        # dirstate entries whose state letter is in ``acceptable``.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # completion target lies outside this repository
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # dirstate paths always use '/'; translate OS separators if needed
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # no state filter selected means "accept all states"
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2943 2943
2944 2944
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    # match patterns are resolved against the first context
    m = scmutil.match(ctx1, pats, opts)
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2958 2958
2959 2959
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
        )
    finally:
        # always release the peer's resources, even if a query failed
        peer.close()
2983 2983
2984 2984
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # suppress tool-selection chatter unless --debug is in effect
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3069 3069
3070 3070
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # pushkey returns truthy on success; exit code is the inverse
            return not r
        else:
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
3106 3106
3107 3107
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvecs (parent vectors) of two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # NOTE(review): previously `rel` was left unbound when none of the
        # comparisons matched, which would raise NameError below. The pvec
        # comparisons presumably cover all cases -- TODO confirm -- but keep
        # a defensive fallback rather than crash a debug command.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3134 3134
3135 3135
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3183 3183
3184 3184
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    opts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3201 3201
3202 3202
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    m = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() returns (source path, source filenode) or False/None
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3222 3222
3223 3223
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    for r in sorted(repo.requirements):
        ui.write(b"%s\n" % r)
3229 3229
3230 3230
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # raw index dumping lives in revlogutils.debug
        # (imported as revlog_debug at the top of this module)
        revlog_debug.dump(ui, r)
        return 0

    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision: [min, max, total] accumulators
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into a [min, max, total] accumulator
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in range(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full snapshot (no delta parent)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # decimal format string sized to fit `max`
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # "count (percent%)" format string sized to fit `max`
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags  : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions     : ' + fmt2 % numrevs)
    ui.writenoi18n(b'    merges    : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b'    normal    : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions     : ' + fmt2 % numrevs)
    ui.writenoi18n(b'    empty     : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b'                   text  : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b'                   delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b'    snapshot  : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b'      lvl-%-3d :       ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b'    snapshot  : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b'      lvl-%-3d :       ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == b'empty':
            return b'    %s     : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b'    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return b'    0x%s      : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks        : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size   : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length  : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length  : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach   : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg)     : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg)    : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b'    level-%-3d (min/max/avg)          : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg)             : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev  : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b'    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b'    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b'    other            : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3585 3541
3586 3542
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents when lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3700 3656
3701 3657
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # processing pipeline: each stage transforms the parse tree
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # diff the two rev lists so the divergence is easy to spot
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3833 3789
3834 3790
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    iolog = None
    if opts[b'logiofd']:
        # Ideally this would be line buffered, but line buffering in
        # binary mode isn't supported and emits a warning on Python 3.8+.
        # Fully unbuffered writes could have a performance impact, but
        # this isn't performance critical code.
        fd = int(opts[b'logiofd'])
        try:
            iolog = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # A pipe cannot seek, so `ab` mode fails on py3; retry as `wb`.
            iolog = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        iolog = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=iolog)
    server.serve_forever()
3883 3839
3884 3840
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time: DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions to nodes; rev2 defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parents are rewritten; the working directory
    # contents are deliberately left untouched (see docstring warning).
    with repo.wlock():
        repo.setparents(node1, node2)
3912 3868
3913 3869
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the first positional argument is the revision, not
    # a file; shuffle the arguments accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # BUGFIX: the error previously named b'debugdata' (copy-paste
            # from that command), blaming the wrong command in the usage
            # message shown to the user.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Reach through filelog/manifest wrappers to the underlying revlog,
    # which is what exposes sidedata().
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Print entries in stable (sorted-by-key) order.
        sidedata = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3940 3896
3941 3897
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # BUGFIX: ssl.wrap_socket() was deprecated in Python 3.7 and removed in
    # Python 3.12.  Build an explicit SSLContext with the same behavior as
    # the old call: no certificate verification and no hostname checking
    # (we only want the peer's certificate bytes, not a validated session).
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # binary_form=True returns the DER bytes needed by the win32 helper.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
4013 3969
4014 3970
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect all *.hg bundles in .hg/strip-backup, newest first (sorted by
    # file modification time, descending).
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize incoming-style options we reuse below: no bundle cache file
    # and no forced exchange.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from the bundle repo `other`,
        # honoring --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent revision missing locally; skip it.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Suppress the usual incoming chatter while probing the bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle that contains the requested node,
                # then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: header with the backup's mtime, then either
                # the bundle path (--verbose) or a one-line-per-cset list.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
4155 4111
4156 4112
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepo state (path, source, pinned revision) of the given
    # revision, or of the working directory when no revision is given.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4168 4124
4169 4125
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose ui and repo (repo may be None for no-repo invocations) to the
    # interactive session.
    code.interact(local={'ui': ui, 'repo': repo})
4185 4141
4186 4142
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # The cache dict is threaded through every successorssets() call so
    # repeated computations are shared across revisions.
    cache = {}
    closest = opts['closest']
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=closest, cache=cache
        ):
            if succsset:
                # One indented line per successors set, nodes space-separated.
                ui.write(b' ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
4241 4197
4242 4198
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        tagsnode = cache.getfnode(node, computemissing=False)
        # Three cases: no cache entry (None), a cached fnode (truthy),
        # or a falsy-but-present entry, which means a corrupt record.
        if tagsnode is None:
            display = b'missing'
        elif tagsnode:
            display = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                display += b' (unknown node)'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), display))
4261 4217
4262 4218
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires an actual repository (the command is optionalrepo).
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties.  The key
    # b'ui' is reserved, and an empty key is rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Dump the parsed tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4326 4282
4327 4283
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    # ui.getpass() can return None; substitute a placeholder so the echoed
    # response line is always printable.
    response = ui.getpass(prompt)
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4342 4298
4343 4299
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever ui.prompt() returned, for testing prompt handling.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4356 4312
4357 4313
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock (wlock first, as
    # in the rest of this module) while rebuilding every cache.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4363 4319
4364 4320
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before delegating all of the
    # actual work to the upgrade module.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4414 4370
4415 4371
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    paths = list(repo[None].walk(matcher))
    if not paths:
        return
    # Normalize path separators for display when ui.slash is set on a
    # platform whose native separator isn't '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Column widths are sized to the longest repo-absolute and relative paths.
    widest_abs = max(len(p) for p in paths)
    widest_rel = max(len(repo.pathto(p)) for p in paths)
    fmt = b'f %%-%ds %%-%ds %%s' % (widest_abs, widest_rel)
    for path in paths:
        exact = b'exact' if matcher.exact(path) else b''
        line = fmt % (path, display(repo.pathto(path)), exact)
        ui.write(b"%s\n" % line.rstrip())
4442 4398
4443 4399
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Divergent nodes, when present, are rendered as a trailing-space
        # separated "<hex> (<phase>)" list before the reason.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = b' '.join(
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            )
            dnodes = rendered + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4461 4417
4462 4418
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Remote-connection options were consumed by hg.peer(); drop them
        # so only the debug arguments are forwarded.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Forward only options that were actually set.
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4493 4449
4494 4450
4495 4451 def _parsewirelangblocks(fh):
4496 4452 activeaction = None
4497 4453 blocklines = []
4498 4454 lastindent = 0
4499 4455
4500 4456 for line in fh:
4501 4457 line = line.rstrip()
4502 4458 if not line:
4503 4459 continue
4504 4460
4505 4461 if line.startswith(b'#'):
4506 4462 continue
4507 4463
4508 4464 if not line.startswith(b' '):
4509 4465 # New block. Flush previous one.
4510 4466 if activeaction:
4511 4467 yield activeaction, blocklines
4512 4468
4513 4469 activeaction = line
4514 4470 blocklines = []
4515 4471 lastindent = 0
4516 4472 continue
4517 4473
4518 4474 # Else we start with an indent.
4519 4475
4520 4476 if not activeaction:
4521 4477 raise error.Abort(_(b'indented line outside of block'))
4522 4478
4523 4479 indent = len(line) - len(line.lstrip())
4524 4480
4525 4481 # If this line is indented more than the last line, concatenate it.
4526 4482 if indent > lastindent and blocklines:
4527 4483 blocklines[-1] += line.lstrip()
4528 4484 else:
4529 4485 blocklines.append(line)
4530 4486 lastindent = indent
4531 4487
4532 4488 # Flush last block.
4533 4489 if activeaction:
4534 4490 yield activeaction, blocklines
4535 4491
4536 4492
4537 4493 @command(
4538 4494 b'debugwireproto',
4539 4495 [
4540 4496 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4541 4497 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4542 4498 (
4543 4499 b'',
4544 4500 b'noreadstderr',
4545 4501 False,
4546 4502 _(b'do not read from stderr of the remote'),
4547 4503 ),
4548 4504 (
4549 4505 b'',
4550 4506 b'nologhandshake',
4551 4507 False,
4552 4508 _(b'do not log I/O related to the peer handshake'),
4553 4509 ),
4554 4510 ]
4555 4511 + cmdutil.remoteopts,
4556 4512 _(b'[PATH]'),
4557 4513 optionalrepo=True,
4558 4514 )
4559 4515 def debugwireproto(ui, repo, path=None, **opts):
4560 4516 """send wire protocol commands to a server
4561 4517
4562 4518 This command can be used to issue wire protocol commands to remote
4563 4519 peers and to debug the raw data being exchanged.
4564 4520
4565 4521 ``--localssh`` will start an SSH server against the current repository
4566 4522 and connect to that. By default, the connection will perform a handshake
4567 4523 and establish an appropriate peer instance.
4568 4524
4569 4525 ``--peer`` can be used to bypass the handshake protocol and construct a
4570 4526 peer instance using the specified class type. Valid values are ``raw``,
4571 4527 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4572 4528 don't support higher-level command actions.
4573 4529
4574 4530 ``--noreadstderr`` can be used to disable automatic reading from stderr
4575 4531 of the peer (for SSH connections only). Disabling automatic reading of
4576 4532 stderr is useful for making output more deterministic.
4577 4533
4578 4534 Commands are issued via a mini language which is specified via stdin.
4579 4535 The language consists of individual actions to perform. An action is
4580 4536 defined by a block. A block is defined as a line with no leading
4581 4537 space followed by 0 or more lines with leading space. Blocks are
4582 4538 effectively a high-level command with additional metadata.
4583 4539
4584 4540 Lines beginning with ``#`` are ignored.
4585 4541
4586 4542 The following sections denote available actions.
4587 4543
4588 4544 raw
4589 4545 ---
4590 4546
4591 4547 Send raw data to the server.
4592 4548
4593 4549 The block payload contains the raw data to send as one atomic send
4594 4550 operation. The data may not actually be delivered in a single system
4595 4551 call: it depends on the abilities of the transport being used.
4596 4552
4597 4553 Each line in the block is de-indented and concatenated. Then, that
4598 4554 value is evaluated as a Python b'' literal. This allows the use of
4599 4555 backslash escaping, etc.
4600 4556
4601 4557 raw+
4602 4558 ----
4603 4559
4604 4560 Behaves like ``raw`` except flushes output afterwards.
4605 4561
4606 4562 command <X>
4607 4563 -----------
4608 4564
4609 4565 Send a request to run a named command, whose name follows the ``command``
4610 4566 string.
4611 4567
4612 4568 Arguments to the command are defined as lines in this block. The format of
4613 4569 each line is ``<key> <value>``. e.g.::
4614 4570
4615 4571 command listkeys
4616 4572 namespace bookmarks
4617 4573
4618 4574 If the value begins with ``eval:``, it will be interpreted as a Python
4619 4575 literal expression. Otherwise values are interpreted as Python b'' literals.
4620 4576 This allows sending complex types and encoding special byte sequences via
4621 4577 backslash escaping.
4622 4578
4623 4579 The following arguments have special meaning:
4624 4580
4625 4581 ``PUSHFILE``
4626 4582 When defined, the *push* mechanism of the peer will be used instead
4627 4583 of the static request-response mechanism and the content of the
4628 4584 file specified in the value of this argument will be sent as the
4629 4585 command payload.
4630 4586
4631 4587 This can be used to submit a local bundle file to the remote.
4632 4588
4633 4589 batchbegin
4634 4590 ----------
4635 4591
4636 4592 Instruct the peer to begin a batched send.
4637 4593
4638 4594 All ``command`` blocks are queued for execution until the next
4639 4595 ``batchsubmit`` block.
4640 4596
4641 4597 batchsubmit
4642 4598 -----------
4643 4599
4644 4600 Submit previously queued ``command`` blocks as a batch request.
4645 4601
4646 4602 This action MUST be paired with a ``batchbegin`` action.
4647 4603
4648 4604 httprequest <method> <path>
4649 4605 ---------------------------
4650 4606
4651 4607 (HTTP peer only)
4652 4608
4653 4609 Send an HTTP request to the peer.
4654 4610
4655 4611 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4656 4612
4657 4613 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4658 4614 headers to add to the request. e.g. ``Accept: foo``.
4659 4615
4660 4616 The following arguments are special:
4661 4617
4662 4618 ``BODYFILE``
4663 4619 The content of the file defined as the value to this argument will be
4664 4620 transferred verbatim as the HTTP request body.
4665 4621
4666 4622 ``frame <type> <flags> <payload>``
4667 4623 Send a unified protocol frame as part of the request body.
4668 4624
4669 4625 All frames will be collected and sent as the body to the HTTP
4670 4626 request.
4671 4627
4672 4628 close
4673 4629 -----
4674 4630
4675 4631 Close the connection to the server.
4676 4632
4677 4633 flush
4678 4634 -----
4679 4635
4680 4636 Flush data written to the server.
4681 4637
4682 4638 readavailable
4683 4639 -------------
4684 4640
4685 4641 Close the write end of the connection and read all available data from
4686 4642 the server.
4687 4643
4688 4644 If the connection to the server encompasses multiple pipes, we poll both
4689 4645 pipes and read available data.
4690 4646
4691 4647 readline
4692 4648 --------
4693 4649
4694 4650 Read a line of output from the server. If there are multiple output
4695 4651 pipes, reads only the main pipe.
4696 4652
4697 4653 ereadline
4698 4654 ---------
4699 4655
4700 4656 Like ``readline``, but read from the stderr pipe, if available.
4701 4657
4702 4658 read <X>
4703 4659 --------
4704 4660
4705 4661 ``read()`` N bytes from the server's main output pipe.
4706 4662
4707 4663 eread <X>
4708 4664 ---------
4709 4665
4710 4666 ``read()`` N bytes from the server's stderr pipe, if available.
4711 4667
4712 4668 Specifying Unified Frame-Based Protocol Frames
4713 4669 ----------------------------------------------
4714 4670
4715 4671 It is possible to emit a *Unified Frame-Based Protocol* by using special
4716 4672 syntax.
4717 4673
4718 4674 A frame is composed as a type, flags, and payload. These can be parsed
4719 4675 from a string of the form:
4720 4676
4721 4677 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4722 4678
4723 4679 ``request-id`` and ``stream-id`` are integers defining the request and
4724 4680 stream identifiers.
4725 4681
4726 4682 ``type`` can be an integer value for the frame type or the string name
4727 4683 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4728 4684 ``command-name``.
4729 4685
4730 4686 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4731 4687 components. Each component (and there can be just one) can be an integer
4732 4688 or a flag name for stream flags or frame flags, respectively. Values are
4733 4689 resolved to integers and then bitwise OR'd together.
4734 4690
4735 4691 ``payload`` represents the raw frame payload. If it begins with
4736 4692 ``cbor:``, the following string is evaluated as Python code and the
4737 4693 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4738 4694 as a Python byte string literal.
4739 4695 """
4740 4696 opts = pycompat.byteskwargs(opts)
4741 4697
4742 4698 if opts[b'localssh'] and not repo:
4743 4699 raise error.Abort(_(b'--localssh requires a repository'))
4744 4700
4745 4701 if opts[b'peer'] and opts[b'peer'] not in (
4746 4702 b'raw',
4747 4703 b'ssh1',
4748 4704 ):
4749 4705 raise error.Abort(
4750 4706 _(b'invalid value for --peer'),
4751 4707 hint=_(b'valid values are "raw" and "ssh1"'),
4752 4708 )
4753 4709
4754 4710 if path and opts[b'localssh']:
4755 4711 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4756 4712
4757 4713 if ui.interactive():
4758 4714 ui.write(_(b'(waiting for commands on stdin)\n'))
4759 4715
4760 4716 blocks = list(_parsewirelangblocks(ui.fin))
4761 4717
4762 4718 proc = None
4763 4719 stdin = None
4764 4720 stdout = None
4765 4721 stderr = None
4766 4722 opener = None
4767 4723
4768 4724 if opts[b'localssh']:
4769 4725 # We start the SSH server in its own process so there is process
4770 4726 # separation. This prevents a whole class of potential bugs around
4771 4727 # shared state from interfering with server operation.
4772 4728 args = procutil.hgcmd() + [
4773 4729 b'-R',
4774 4730 repo.root,
4775 4731 b'debugserve',
4776 4732 b'--sshstdio',
4777 4733 ]
4778 4734 proc = subprocess.Popen(
4779 4735 pycompat.rapply(procutil.tonativestr, args),
4780 4736 stdin=subprocess.PIPE,
4781 4737 stdout=subprocess.PIPE,
4782 4738 stderr=subprocess.PIPE,
4783 4739 bufsize=0,
4784 4740 )
4785 4741
4786 4742 stdin = proc.stdin
4787 4743 stdout = proc.stdout
4788 4744 stderr = proc.stderr
4789 4745
4790 4746 # We turn the pipes into observers so we can log I/O.
4791 4747 if ui.verbose or opts[b'peer'] == b'raw':
4792 4748 stdin = util.makeloggingfileobject(
4793 4749 ui, proc.stdin, b'i', logdata=True
4794 4750 )
4795 4751 stdout = util.makeloggingfileobject(
4796 4752 ui, proc.stdout, b'o', logdata=True
4797 4753 )
4798 4754 stderr = util.makeloggingfileobject(
4799 4755 ui, proc.stderr, b'e', logdata=True
4800 4756 )
4801 4757
4802 4758 # --localssh also implies the peer connection settings.
4803 4759
4804 4760 url = b'ssh://localserver'
4805 4761 autoreadstderr = not opts[b'noreadstderr']
4806 4762
4807 4763 if opts[b'peer'] == b'ssh1':
4808 4764 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4809 4765 peer = sshpeer.sshv1peer(
4810 4766 ui,
4811 4767 url,
4812 4768 proc,
4813 4769 stdin,
4814 4770 stdout,
4815 4771 stderr,
4816 4772 None,
4817 4773 autoreadstderr=autoreadstderr,
4818 4774 )
4819 4775 elif opts[b'peer'] == b'raw':
4820 4776 ui.write(_(b'using raw connection to peer\n'))
4821 4777 peer = None
4822 4778 else:
4823 4779 ui.write(_(b'creating ssh peer from handshake results\n'))
4824 4780 peer = sshpeer.makepeer(
4825 4781 ui,
4826 4782 url,
4827 4783 proc,
4828 4784 stdin,
4829 4785 stdout,
4830 4786 stderr,
4831 4787 autoreadstderr=autoreadstderr,
4832 4788 )
4833 4789
4834 4790 elif path:
4835 4791 # We bypass hg.peer() so we can proxy the sockets.
4836 4792 # TODO consider not doing this because we skip
4837 4793 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4838 4794 u = urlutil.url(path)
4839 4795 if u.scheme != b'http':
4840 4796 raise error.Abort(_(b'only http:// paths are currently supported'))
4841 4797
4842 4798 url, authinfo = u.authinfo()
4843 4799 openerargs = {
4844 4800 'useragent': b'Mercurial debugwireproto',
4845 4801 }
4846 4802
4847 4803 # Turn pipes/sockets into observers so we can log I/O.
4848 4804 if ui.verbose:
4849 4805 openerargs.update(
4850 4806 {
4851 4807 'loggingfh': ui,
4852 4808 'loggingname': b's',
4853 4809 'loggingopts': {
4854 4810 'logdata': True,
4855 4811 'logdataapis': False,
4856 4812 },
4857 4813 }
4858 4814 )
4859 4815
4860 4816 if ui.debugflag:
4861 4817 openerargs['loggingopts']['logdataapis'] = True
4862 4818
4863 4819 # Don't send default headers when in raw mode. This allows us to
4864 4820 # bypass most of the behavior of our URL handling code so we can
4865 4821 # have near complete control over what's sent on the wire.
4866 4822 if opts[b'peer'] == b'raw':
4867 4823 openerargs['sendaccept'] = False
4868 4824
4869 4825 opener = urlmod.opener(ui, authinfo, **openerargs)
4870 4826
4871 4827 if opts[b'peer'] == b'raw':
4872 4828 ui.write(_(b'using raw connection to peer\n'))
4873 4829 peer = None
4874 4830 elif opts[b'peer']:
4875 4831 raise error.Abort(
4876 4832 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4877 4833 )
4878 4834 else:
4879 4835 peer = httppeer.makepeer(ui, path, opener=opener)
4880 4836
4881 4837 # We /could/ populate stdin/stdout with sock.makefile()...
4882 4838 else:
4883 4839 raise error.Abort(_(b'unsupported connection configuration'))
4884 4840
4885 4841 batchedcommands = None
4886 4842
4887 4843 # Now perform actions based on the parsed wire language instructions.
4888 4844 for action, lines in blocks:
4889 4845 if action in (b'raw', b'raw+'):
4890 4846 if not stdin:
4891 4847 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4892 4848
4893 4849 # Concatenate the data together.
4894 4850 data = b''.join(l.lstrip() for l in lines)
4895 4851 data = stringutil.unescapestr(data)
4896 4852 stdin.write(data)
4897 4853
4898 4854 if action == b'raw+':
4899 4855 stdin.flush()
4900 4856 elif action == b'flush':
4901 4857 if not stdin:
4902 4858 raise error.Abort(_(b'cannot call flush on this peer'))
4903 4859 stdin.flush()
4904 4860 elif action.startswith(b'command'):
4905 4861 if not peer:
4906 4862 raise error.Abort(
4907 4863 _(
4908 4864 b'cannot send commands unless peer instance '
4909 4865 b'is available'
4910 4866 )
4911 4867 )
4912 4868
4913 4869 command = action.split(b' ', 1)[1]
4914 4870
4915 4871 args = {}
4916 4872 for line in lines:
4917 4873 # We need to allow empty values.
4918 4874 fields = line.lstrip().split(b' ', 1)
4919 4875 if len(fields) == 1:
4920 4876 key = fields[0]
4921 4877 value = b''
4922 4878 else:
4923 4879 key, value = fields
4924 4880
4925 4881 if value.startswith(b'eval:'):
4926 4882 value = stringutil.evalpythonliteral(value[5:])
4927 4883 else:
4928 4884 value = stringutil.unescapestr(value)
4929 4885
4930 4886 args[key] = value
4931 4887
4932 4888 if batchedcommands is not None:
4933 4889 batchedcommands.append((command, args))
4934 4890 continue
4935 4891
4936 4892 ui.status(_(b'sending %s command\n') % command)
4937 4893
4938 4894 if b'PUSHFILE' in args:
4939 4895 with open(args[b'PUSHFILE'], 'rb') as fh:
4940 4896 del args[b'PUSHFILE']
4941 4897 res, output = peer._callpush(
4942 4898 command, fh, **pycompat.strkwargs(args)
4943 4899 )
4944 4900 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4945 4901 ui.status(
4946 4902 _(b'remote output: %s\n') % stringutil.escapestr(output)
4947 4903 )
4948 4904 else:
4949 4905 with peer.commandexecutor() as e:
4950 4906 res = e.callcommand(command, args).result()
4951 4907
4952 4908 ui.status(
4953 4909 _(b'response: %s\n')
4954 4910 % stringutil.pprint(res, bprefix=True, indent=2)
4955 4911 )
4956 4912
4957 4913 elif action == b'batchbegin':
4958 4914 if batchedcommands is not None:
4959 4915 raise error.Abort(_(b'nested batchbegin not allowed'))
4960 4916
4961 4917 batchedcommands = []
4962 4918 elif action == b'batchsubmit':
4963 4919 # There is a batching API we could go through. But it would be
4964 4920 # difficult to normalize requests into function calls. It is easier
4965 4921 # to bypass this layer and normalize to commands + args.
4966 4922 ui.status(
4967 4923 _(b'sending batch with %d sub-commands\n')
4968 4924 % len(batchedcommands)
4969 4925 )
4970 4926 assert peer is not None
4971 4927 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4972 4928 ui.status(
4973 4929 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4974 4930 )
4975 4931
4976 4932 batchedcommands = None
4977 4933
4978 4934 elif action.startswith(b'httprequest '):
4979 4935 if not opener:
4980 4936 raise error.Abort(
4981 4937 _(b'cannot use httprequest without an HTTP peer')
4982 4938 )
4983 4939
4984 4940 request = action.split(b' ', 2)
4985 4941 if len(request) != 3:
4986 4942 raise error.Abort(
4987 4943 _(
4988 4944 b'invalid httprequest: expected format is '
4989 4945 b'"httprequest <method> <path>'
4990 4946 )
4991 4947 )
4992 4948
4993 4949 method, httppath = request[1:]
4994 4950 headers = {}
4995 4951 body = None
4996 4952 frames = []
4997 4953 for line in lines:
4998 4954 line = line.lstrip()
4999 4955 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
5000 4956 if m:
5001 4957 # Headers need to use native strings.
5002 4958 key = pycompat.strurl(m.group(1))
5003 4959 value = pycompat.strurl(m.group(2))
5004 4960 headers[key] = value
5005 4961 continue
5006 4962
5007 4963 if line.startswith(b'BODYFILE '):
5008 4964 with open(line.split(b' ', 1), b'rb') as fh:
5009 4965 body = fh.read()
5010 4966 elif line.startswith(b'frame '):
5011 4967 frame = wireprotoframing.makeframefromhumanstring(
5012 4968 line[len(b'frame ') :]
5013 4969 )
5014 4970
5015 4971 frames.append(frame)
5016 4972 else:
5017 4973 raise error.Abort(
5018 4974 _(b'unknown argument to httprequest: %s') % line
5019 4975 )
5020 4976
5021 4977 url = path + httppath
5022 4978
5023 4979 if frames:
5024 4980 body = b''.join(bytes(f) for f in frames)
5025 4981
5026 4982 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
5027 4983
5028 4984 # urllib.Request insists on using has_data() as a proxy for
5029 4985 # determining the request method. Override that to use our
5030 4986 # explicitly requested method.
5031 4987 req.get_method = lambda: pycompat.sysstr(method)
5032 4988
5033 4989 try:
5034 4990 res = opener.open(req)
5035 4991 body = res.read()
5036 4992 except util.urlerr.urlerror as e:
5037 4993 # read() method must be called, but only exists in Python 2
5038 4994 getattr(e, 'read', lambda: None)()
5039 4995 continue
5040 4996
5041 4997 ct = res.headers.get('Content-Type')
5042 4998 if ct == 'application/mercurial-cbor':
5043 4999 ui.write(
5044 5000 _(b'cbor> %s\n')
5045 5001 % stringutil.pprint(
5046 5002 cborutil.decodeall(body), bprefix=True, indent=2
5047 5003 )
5048 5004 )
5049 5005
5050 5006 elif action == b'close':
5051 5007 assert peer is not None
5052 5008 peer.close()
5053 5009 elif action == b'readavailable':
5054 5010 if not stdout or not stderr:
5055 5011 raise error.Abort(
5056 5012 _(b'readavailable not available on this peer')
5057 5013 )
5058 5014
5059 5015 stdin.close()
5060 5016 stdout.read()
5061 5017 stderr.read()
5062 5018
5063 5019 elif action == b'readline':
5064 5020 if not stdout:
5065 5021 raise error.Abort(_(b'readline not available on this peer'))
5066 5022 stdout.readline()
5067 5023 elif action == b'ereadline':
5068 5024 if not stderr:
5069 5025 raise error.Abort(_(b'ereadline not available on this peer'))
5070 5026 stderr.readline()
5071 5027 elif action.startswith(b'read '):
5072 5028 count = int(action.split(b' ', 1)[1])
5073 5029 if not stdout:
5074 5030 raise error.Abort(_(b'read not available on this peer'))
5075 5031 stdout.read(count)
5076 5032 elif action.startswith(b'eread '):
5077 5033 count = int(action.split(b' ', 1)[1])
5078 5034 if not stderr:
5079 5035 raise error.Abort(_(b'eread not available on this peer'))
5080 5036 stderr.read(count)
5081 5037 else:
5082 5038 raise error.Abort(_(b'unknown action: %s') % action)
5083 5039
5084 5040 if batchedcommands is not None:
5085 5041 raise error.Abort(_(b'unclosed "batchbegin" request'))
5086 5042
5087 5043 if peer:
5088 5044 peer.close()
5089 5045
5090 5046 if proc:
5091 5047 proc.kill()
@@ -1,218 +1,269
1 1 # revlogutils/debug.py - utility used for revlog debugging
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 # Copyright 2022 Octobus <contact@octobus.net>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from .. import (
10 10 node as nodemod,
11 11 )
12 12
13 13 from . import (
14 14 constants,
15 15 )
16 16
17 17 INDEX_ENTRY_DEBUG_COLUMN = []
18 18
19 19 NODE_SIZE = object()
20 20
21 21
22 22 class _column_base:
23 23 """constains the definition of a revlog column
24 24
25 25 name: the column header,
26 26 value_func: the function called to get a value,
27 27 size: the width of the column,
28 28 verbose_only: only include the column in verbose mode.
29 29 """
30 30
31 31 def __init__(self, name, value_func, size=None, verbose=False):
32 32 self.name = name
33 33 self.value_func = value_func
34 34 if size is not NODE_SIZE:
35 35 if size is None:
36 36 size = 8 # arbitrary default
37 37 size = max(len(name), size)
38 38 self._size = size
39 39 self.verbose_only = verbose
40 40
41 41 def get_size(self, node_size):
42 42 if self._size is NODE_SIZE:
43 43 return node_size
44 44 else:
45 45 return self._size
46 46
47 47
def debug_column(name, size=None, verbose=False):
    """decorated function is registered as a column

    name: the name of the column,
    size: the expected size of the column (NODE_SIZE for node-id width),
    verbose: only display the column in verbose mode.

    Note: the decorator returns the ``_column_base`` object, not the
    function, so the decorated name is bound to the column object and the
    original function is only reachable through ``entry.value_func``.
    """

    def register(func):
        entry = _column_base(
            name=name,
            value_func=func,
            size=size,
            verbose=verbose,
        )
        INDEX_ENTRY_DEBUG_COLUMN.append(entry)
        return entry

    return register
66 66
67 67
@debug_column(b"rev", size=6)
def _rev(index, rev, entry, hexfn):
    """the revision number itself"""
    return b"%d" % rev
71 71
72 72
@debug_column(b"rank", size=6, verbose=True)
def rank(index, rev, entry, hexfn):
    """the rank stored in the index entry"""
    rank_value = entry[constants.ENTRY_RANK]
    return b"%d" % rank_value
76 76
77 77
@debug_column(b"linkrev", size=6)
def _linkrev(index, rev, entry, hexfn):
    """the linked (changelog) revision"""
    linkrev = entry[constants.ENTRY_LINK_REV]
    return b"%d" % linkrev
81 81
82 82
@debug_column(b"nodeid", size=NODE_SIZE)
def _nodeid(index, rev, entry, hexfn):
    """hex node id of this revision"""
    node = entry[constants.ENTRY_NODE_ID]
    return hexfn(node)
86 86
87 87
@debug_column(b"p1-rev", size=6, verbose=True)
def _p1_rev(index, rev, entry, hexfn):
    """revision number of the first parent"""
    p1 = entry[constants.ENTRY_PARENT_1]
    return b"%d" % p1
91 91
92 92
@debug_column(b"p1-nodeid", size=NODE_SIZE)
def _p1_node(index, rev, entry, hexfn):
    """hex node id of the first parent"""
    p1_rev = entry[constants.ENTRY_PARENT_1]
    return hexfn(index[p1_rev][constants.ENTRY_NODE_ID])
98 98
99 99
@debug_column(b"p2-rev", size=6, verbose=True)
def _p2_rev(index, rev, entry, hexfn):
    """revision number of the second parent"""
    p2 = entry[constants.ENTRY_PARENT_2]
    return b"%d" % p2
103 103
104 104
@debug_column(b"p2-nodeid", size=NODE_SIZE)
def _p2_node(index, rev, entry, hexfn):
    """hex node id of the second parent"""
    p2_rev = entry[constants.ENTRY_PARENT_2]
    return hexfn(index[p2_rev][constants.ENTRY_NODE_ID])
110 110
111 111
@debug_column(b"full-size", size=20, verbose=True)
def full_size(index, rev, entry, hexfn):
    """uncompressed size of the revision's data"""
    uncompressed_len = entry[constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
    return b"%d" % uncompressed_len
115 115
116 116
@debug_column(b"delta-base", size=6, verbose=True)
def delta_base(index, rev, entry, hexfn):
    """revision this revision's delta is stored against"""
    base = entry[constants.ENTRY_DELTA_BASE]
    return b"%d" % base
120 120
121 121
@debug_column(b"flags", size=2, verbose=True)
def flags(index, rev, entry, hexfn):
    """revision flags (low 16 bits of the offset/flags field)"""
    return b"%d" % (entry[constants.ENTRY_DATA_OFFSET] & 0xFFFF)
127 127
128 128
@debug_column(b"comp-mode", size=4, verbose=True)
def compression_mode(index, rev, entry, hexfn):
    """compression mode used for the revision's data"""
    comp_mode = entry[constants.ENTRY_DATA_COMPRESSION_MODE]
    return b"%d" % comp_mode
132 132
133 133
@debug_column(b"data-offset", size=20, verbose=True)
def data_offset(index, rev, entry, hexfn):
    """data offset (upper bits of the offset/flags field)"""
    return b"%d" % (entry[constants.ENTRY_DATA_OFFSET] >> 16)
139 139
140 140
@debug_column(b"chunk-size", size=10, verbose=True)
def data_chunk_size(index, rev, entry, hexfn):
    """compressed (on-disk) size of the revision's data"""
    compressed_len = entry[constants.ENTRY_DATA_COMPRESSED_LENGTH]
    return b"%d" % compressed_len
144 144
145 145
@debug_column(b"sd-comp-mode", size=7, verbose=True)
def sidedata_compression_mode(index, rev, entry, hexfn):
    """compression mode of the sidedata chunk, as a symbolic name"""
    compression = entry[constants.ENTRY_SIDEDATA_COMPRESSION_MODE]
    known = {
        constants.COMP_MODE_PLAIN: b"plain",
        constants.COMP_MODE_DEFAULT: b"default",
        constants.COMP_MODE_INLINE: b"inline",
    }
    # fall back to the raw numeric value for unknown modes
    return known.get(compression, b"%d" % compression)
157 157
158 158
@debug_column(b"sidedata-offset", size=20, verbose=True)
def sidedata_offset(index, rev, entry, hexfn):
    """offset of the revision's sidedata chunk"""
    offset = entry[constants.ENTRY_SIDEDATA_OFFSET]
    return b"%d" % offset
162 162
163 163
@debug_column(b"sd-chunk-size", size=10, verbose=True)
def sidedata_chunk_size(index, rev, entry, hexfn):
    """compressed size of the revision's sidedata chunk"""
    compressed_len = entry[constants.ENTRY_SIDEDATA_COMPRESSED_LENGTH]
    return b"%d" % compressed_len
167 167
168 168
def debug_index(
    ui,
    repo,
    formatter,
    revlog,
    full_node,
):
    """display index data for a revlog"""
    hexfn = nodemod.hex if full_node else nodemod.short

    # measure the width of one rendered node id; 12 is the fallback for an
    # empty revlog
    idlen = 12
    for sample_rev in revlog:
        idlen = len(hexfn(revlog.node(sample_rev)))
        break

    fm = formatter

    # keep only the columns relevant at the current verbosity level
    columns = [
        c
        for c in INDEX_ENTRY_DEBUG_COLUMN
        if ui.verbose or not c.verbose_only
    ]

    header = b' '.join(c.name.rjust(c.get_size(idlen)) for c in columns)
    fm.plain(header + b'\n')

    index = revlog.index

    for rev in revlog:
        fm.startitem()
        entry = index[rev]
        for pos, column in enumerate(columns):
            if pos:
                fm.plain(b' ')
            width = column.get_size(idlen)
            value = column.value_func(index, rev, entry, hexfn)
            fm.write(column.name, b"%%%ds" % width, value)
        fm.plain(b'\n')

    fm.end()
219
220
def dump(ui, revlog):
    """perform the work for `hg debugrevlog --dump`

    Writes a header line followed by one line per revision with low-level
    index information (parents, offsets, delta base, sizes, chain data) to
    ``ui``.
    """
    # XXX seems redundant with debug index ?
    r = revlog
    numrevs = len(r)
    ui.write(
        (
            b"# rev p1rev p2rev start end deltastart base p1 p2"
            b" rawsize totalsize compression heads chainlen\n"
        )
    )
    ts = 0  # running total of raw (uncompressed) sizes
    heads = set()  # revisions not (yet) a parent of any later revision

    for rev in range(numrevs):
        dbase = r.deltaparent(rev)
        if dbase == -1:
            # full snapshot: report the revision itself as its delta start
            dbase = rev
        cbase = r.chainbase(rev)
        clen = r.chainlen(rev)
        p1, p2 = r.parentrevs(rev)
        rs = r.rawsize(rev)
        ts += rs
        # reuse the parents computed above instead of calling
        # parentrevs() a second time
        heads -= {p1, p2}
        heads.add(rev)
        try:
            # cumulative raw size over cumulative stored size
            compression = ts / r.end(rev)
        except ZeroDivisionError:
            compression = 0
        ui.write(
            b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
            b"%11d %5d %8d\n"
            % (
                rev,
                p1,
                p2,
                r.start(rev),
                r.end(rev),
                r.start(dbase),
                r.start(cbase),
                r.start(p1),
                r.start(p2),
                rs,
                ts,
                compression,
                len(heads),
                clen,
            )
        )
General Comments 0
You need to be logged in to leave comments. Login now