##// END OF EJS Templates
debugindex: move to a flexible column...
marmoute -
r50148:a3213042 default
parent child Browse files
Show More
@@ -1,5032 +1,5034 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revlogutils,
77 77 revset,
78 78 revsetlang,
79 79 scmutil,
80 80 setdiscovery,
81 81 simplemerge,
82 82 sshpeer,
83 83 sslutil,
84 84 streamclone,
85 85 strip,
86 86 tags as tagsmod,
87 87 templater,
88 88 treediscovery,
89 89 upgrade,
90 90 url as urlmod,
91 91 util,
92 92 vfs as vfsmod,
93 93 wireprotoframing,
94 94 wireprotoserver,
95 95 )
96 96 from .interfaces import repository
97 97 from .utils import (
98 98 cborutil,
99 99 compression,
100 100 dateutil,
101 101 procutil,
102 102 stringutil,
103 103 urlutil,
104 104 )
105 105
106 106 from .revlogutils import (
107 107 constants as revlog_constants,
108 108 debug as revlog_debug,
109 109 deltas as deltautil,
110 110 nodemap,
111 111 rewrite,
112 112 sidedata,
113 113 )
114 114
# Convenience alias so call sites can release lock bundles without going
# through lockmod directly.
release = lockmod.release

# Command table for every debug* command in this module; pre-seeded with the
# commands registered by the strip extension module so they share one table.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
121 121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = store.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancnode = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(ancnode), hex(ancnode)))
141 141
142 142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # This is a base85-armored version of the EICAR test file; a resident
    # antivirus engine is expected to react to it.  See
    # https://en.wikipedia.org/wiki/EICAR_test_file for details.
    payload = util.b85decode(
        b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
        b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
    )
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(payload)
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
158 158
159 159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
166 166
167 167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # first pass: determine number of revs in DAG so the progress bar has a
    # total before we start committing
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        # second pass: replay the DAG events, committing one changeset per
        # 'n' (node) event; 'l' records a local tag, 'a' switches branch
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the shared file so the
                        # result stays mergeable in later merges
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this revision's slice of the file with its rev id
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the per-rev files from the second parent
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file contents from filecontent
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
353 353
354 354
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup `gen`, indented by `indent` spaces

    With `all` set, every delta of every section (changelog, manifest, then
    each filelog) is listed; otherwise only the changelog node hashes are
    printed.  `gen` is consumed in the process.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one line per delta of the current section of `gen`
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iterate filelog sections until the empty sentinel header is seen
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
394 394
395 395
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            marker = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, marker)
        fm.end()
418 418
419 419
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in binary blob 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
428 428
429 429
def _quasirepr(thing):
    """return a repr-like bytestring, with deterministic (sorted) key order
    for mapping types"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
436 436
437 437
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    headerfmt = b'%s -- %s (mandatory: %r)\n'
    for part in gen.iterparts():
        # honor --part-type filtering, if any
        if parttypes and part.type not in parttypes:
            continue
        ui.write(
            (headerfmt % (part.type, _quasirepr(part.params), part.mandatory))
        )
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
460 460
461 461
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec; do not unpack the payload
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
484 484
485 485
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # always tear down the peer connection, even on error
        peer.close()
505 505
506 506
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # recompute the file-change information from the revision itself
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the information back from the changelog sidedata, if present
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        return

    categories = [
        (b"added", files.added),
        (b"removed", files.removed),
        (b"merged", files.merged),
        (b"salvaged", files.salvaged),
    ]
    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        # classify the file into the first matching category
        for action, members in categories:
            if f in members:
                break
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
556 556
557 557
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    nerrors = 0
    for err in repo.dirstate.verify(m1, m2):
        # each error is a (format, *args) tuple
        ui.warn(err[0] % err[1:])
        nerrors += 1
    if nerrors:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
571 571
572 572
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
585 585
586 586
def _debugdisplaycolor(ui):
    """print every available color name, rendered in that color"""
    # work on a copy so we can replace the styles without side effects
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, _v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[len(b'color.'):]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    def sortkey(item):
        # sort labels with '_' after the others to group '_background' entries
        name, label = item
        return (b'_' in name, name, label)

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
603 603
604 604
def _debugdisplaystyle(ui):
    """print every configured style label with its rendered effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect lists line up in a single column
    width = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(name))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
618 618
619 619
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    # NOTE: local name chosen to avoid shadowing the `requirements` module
    bundlereqs, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(bundlereqs)))
641 641
642 642
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone revlog index: emit its DAG, labeling requested revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) for nodes, ('l', (rev, label))
            # for the revisions explicitly listed on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no index file: walk the changelog of the local repository
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged revision to its list of tag names
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an annotation event whenever the branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
712 712
713 713
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if any(opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')):
        # with -c/-m/--dir the single positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
729 729
730 730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matchfn = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matchfn(parsed[0]))
749 749
750 750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed

                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                      (when p1 has an empty delta)
                    - skip2: a delta against the same base as p2
                      (when p2 has an empty delta)
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta
                     chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # gather per-revision delta-chain statistics:
        # returns (p1, p2, compsize, uncompsize, deltatype, chain, chainsize)
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # classify how the delta base relates to this revision
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, deltas are always against the previous
            # revision (or the revision is a full snapshot)
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # number each unique chain base for the `chainid` keyword
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of the whole chain to measure how much
            # data would actually be fetched from disk
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
991 991
992 992
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # one positional argument means "REV"; two mean "FILE REV"
    if arg_2 is None:
        file_, rev = None, arg_1
    else:
        file_, rev = arg_1, arg_2
    rev = int(rev)

    # NOTE: local renamed from `revlog` to avoid shadowing the module import
    rl = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)

    deltacomputer = deltautil.deltacomputer(
        rl,
        write_debug=ui.write,
        debug_search=True,
    )

    node = rl.node(rev)
    p1r, p2r = rl.parentrevs(rev)
    p1 = rl.node(p1r)
    p2 = rl.node(p2r)
    btext = [rl.revision(rev)]
    textlen = len(btext[0])
    cachedelta = None
    flags = rl.flags(rev)

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    fh = rl._datafp()
    # replay the delta search, emitting debug output via ui.write
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1050 1050
1051 1051
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 docket (the small metadata file)
        # instead of the entries themselves; v1 has no such file.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        # unpack the fixed-layout tree metadata blob stored in the docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is deprecated but still wins over --dates when given
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) so ties are deterministic
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # padded to keep columns aligned with real timestamps
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink flag in the stored mode bits
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1139 1139
1140 1140
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        # the ignore-pattern hash is the trailing field of the tree metadata
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1155 1155
1156 1156
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situation.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: use a filtered view of the local repository as
        # the "remote" side of the discovery
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: likewise restrict the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy (pre-setdiscovery) protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern sampling-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: capture the chatter emitted during
        # discovery and expose it as a data field instead
        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"queries:               %(total-queries)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1411 1411
1412 1412
# read/write buffer size (4 KiB) used by `debugdownload` streaming
_chunksize = 4 << 10
1414 1414
1415 1415
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    # let the url layer apply auth/proxy configuration for us
    fh = urlmod.open(ui, url, output)

    # stream either to the given file or straight to the ui
    if output:
        dest = open(output, b"wb", _chunksize)
    else:
        dest = ui
    try:
        chunk = fh.read(_chunksize)
        while chunk:
            dest.write(chunk)
            chunk = fh.read(_chunksize)
    finally:
        # only close destinations we opened ourselves
        if output:
            dest.close()
1438 1438
1439 1439
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate where the extension was loaded from; frozen (oxidized)
        # builds have no __file__, fall back to the executable path
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # default verbosity: annotate the name with compatibility status
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1501 1501
1502 1502
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset pipeline: parse -> analyze -> optimize; --show-stage can
    # dump the tree after any of these stages
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names to test against the matcher
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include working-directory files (known, unknown and ignored)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1598 1598
1599 1599
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report (detection only) is mutually exclusive with repairing
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the heavy lifting lives in revlogutils.rewrite
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1672 1672
1673 1673
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # pad the name column to the longest variant name
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' followed by padding so all value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():
        # plain output renders booleans as yes/no; structured formatters
        # keep native values

        def formatvalue(value):
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row (config/default columns only with --verbose)
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # color the row according to how the repo value compares with the
        # configured value and the Mercurial default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1744 1744
1745 1745
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean capability probe the same way the ui expects
        return flag and b'yes' or b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # case-sensitivity needs a scratch file; the probe may legitimately
    # fail (e.g. read-only path), in which case we report "(unknown)"
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1768 1768
1769 1769
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # assemble the wire-protocol getbundle arguments from the CLI node ids
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name to the on-disk bundle header
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1816 1816
1817 1817
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # a file is ignored either directly or because one of its
                # parent directories matches an ignore rule
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1866 1866
1867 1867
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # non-revlog storage backends may wrap a revlog; unwrap it (falling back
    # to the store itself) so the generic index dumper gets a real revlog
    revlog = getattr(store, b'_revlog', store)

    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=revlog,
        full_node=ui.debugflag,
    )
1887 1889
1888 1890
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in r:
        # emit one edge per parent; the second parent only exists for merges
        p1, p2 = r.parents(r.node(rev))
        ui.write(b"\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write(b"}\n")
1907 1909
1908 1910
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Force the index to be fully loaded before asking it for statistics.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # Only the C/Rust index implementations expose a stats() method.
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1918 1920
1919 1921
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer) builds ship the stdlib inside the executable
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own installation checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2218 2220
2219 2221
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2233 2235
2234 2236
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; all the work happens in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2239 2241
2240 2242
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2364 2366
2365 2367
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # raises if the current manifest storage has no fulltext cache
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2439 2441
2440 2442
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable template covering commits, files and extras
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2548 2550
2549 2551
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.items():
        if name != b'branches':
            names.update(ns.listnames(repo))
    names.update(
        tag
        for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    completions = set()
    if not args:
        args = [b'']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2572 2574
2573 2575
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        # native indexes can serialize themselves; fall back to the pure
        # python serializer otherwise
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2635 2637
2636 2638
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2786 2788
2787 2789
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2800 2802
2801 2803
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # NOTE(review): this function was originally (mis)named debugp1copies,
    # shadowing the real debugp1copies at module level. The command table was
    # unaffected (the decorator captured the function object), so renaming it
    # only restores the correct module-level binding for debugp1copies.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2814 2816
2815 2817
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # returns (files, dirs) matching `path` whose dirstate state is in
        # `acceptable`
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # with no state filters, accept every dirstate state
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2884 2886
2885 2887
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(ctx1, pats, opts)
    # sorted by destination for stable output
    copymap = copies.pathcopies(ctx1, ctx2, matcher)
    for destination, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2899 2901
2900 2902
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    with ui.configoverride({(b'devel', b'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

    def yesno(flag):
        return _(b'yes') if flag else _(b'no')

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(islocal))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
    finally:
        peer.close()
2924 2926
2925 2927
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool wins over everything else: inject it as ui.forcemerge
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # report the other tool-selection inputs (env var, ui.merge) with -v
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # suppress _picktool's own output unless --debug is in effect
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3010 3012
3011 3013
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # listing mode: dump every key/value pair in the namespace
            for key, value in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
        else:
            # update mode: conditionally move the key from old to new
            key, old, new = keyinfo
            args = {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            }
            with target.commandexecutor() as executor:
                result = executor.callcommand(b'pushkey', args).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            return not result
    finally:
        target.close()
3047 3049
3048 3050
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors ("pvecs") of two revisions

    Prints both vectors, their depths, and the relation computed
    between them.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously there was no fallback branch, so `rel` stayed unbound
        # when none of the comparisons matched and the final ui.write()
        # crashed with UnboundLocalError.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3075 3077
3076 3078
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this down.
        changedfiles = None
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # manifest files unknown to the dirstate
            missing = inmanifest - indirstate
            # dirstate-only files that are not freshly added
            extra = {
                f
                for f in indirstate - inmanifest
                if not dirstate.get_entry(f).added
            }
            changedfiles = missing | extra

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3124 3126
3125 3127
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # delegate the actual work to the repair module
    byteopts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, byteopts.get(b"only_data"))
3142 3144
3143 3145
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            source, sourcenode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, source, hex(sourcenode))
            )
3163 3165
3164 3166
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement per line, sorted for stable output
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3170 3172
3171 3173
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With --dump, print one line of raw index data per revision instead
    of the aggregated statistics.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: one raw line per revision, then return early
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "stored as full text": treat the rev as its own base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a rev's parents stop being heads once the rev is seen
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # cumulative raw size over cumulative stored size so far
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each 3-element list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into the [min, max, total] accumulator `l` in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # single pass over all revisions gathering every statistic at once
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as full text (a depth-0 snapshot)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # stored as a delta against `delta`
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # classify the delta base: prev, p1, p2 or other
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the chunk identifies its compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # turn the running totals (slot 2) into averages
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # format-string builders sized to the widest value they will print
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # return (value, percentage-of-total) for the percent format strings
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # pretty-print the one-byte chunk type marker
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3526 3528
3527 3529
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # only the two historical index layouts are supported
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # measure the first node to size the nodeid columns
        idlen = len(shortfn(r.node(i)))
        break

    # header line: layout depends on format and verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # one line per revision, mirroring the header layout above
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if parent lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3641 3643
3642 3644
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # the ordered pipeline of (stage name, transform) the tree goes through
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final 'optimized' stage
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # which stage trees to print: always, or only when the tree changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, keeping each intermediate tree for later comparison
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the results
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-style diff of the two revision sequences
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3774 3776
3775 3777
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfile = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(opts[b'logiofd'])
        try:
            logfile = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfile = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfile = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfile)
    server.serve_forever()
3824 3826
3825 3827
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    who deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # The second parent defaults to the null revision when omitted.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3853 3855
3854 3856
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir there is no FILE argument: the sole positional
    # argument is the revision, so shuffle the arguments accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Use this command's own name in errors (was copy-pasted from
            # debugdata and wrongly reported b'debugdata').
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Some storage objects wrap a revlog; unwrap to reach the sidedata API.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for deterministic output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3881 3883
3882 3884
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only TLS-capable schemes make sense here; fill in standard ports.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ``ssl.wrap_socket()`` was deprecated in Python 3.7 and removed in
    # Python 3.12; build an explicit SSLContext instead. Verification is
    # intentionally disabled: we only want the peer's certificate chain,
    # not a validated connection.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3954 3956
3955 3957
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect strip-backup bundles, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Force-feed defaults expected by the incoming machinery below.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to ``limit`` changesets from ``chlist`` via ``displayer``,
        # honoring --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent revision we do not have; skip it.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Suppress peer chatter while computing the incoming changesets.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle that contains the wanted changeset.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: header with the bundle's mtime, then its
                # changesets (full path only with --verbose).
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
4096 4098
4097 4099
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """dump the subrepository state (path, source and revision) of a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4109 4111
4110 4112
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily: only needed when this debug command actually runs.
    import code

    code.interact(local={'ui': ui, 'repo': repo})
4126 4128
4127 4129
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # The cache dict is shared across successorssets() calls so repeated
    # computation is amortized over all requested revisions.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        succsets = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in succsets:
            # One indented line per successors set; empty sets print as a
            # bare newline.
            rendered = b''.join(b' %s' % short(node) for node in succsset)
            ui.write(rendered)
            ui.write(b'\n')
4182 4184
4183 4185
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: report only what is already cached; never
        # compute new entries as a side effect of a debug command.
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            tagsnodedisplay = hex(tagsnode)
            # A cached .hgtags filenode unknown to the filelog points at a
            # stale or corrupt cache entry.
            if not flog.hasnode(tagsnode):
                tagsnodedisplay += b' (unknown node)'
        elif tagsnode is None:
            # No cache entry was recorded for this revision.
            tagsnodedisplay = b'missing'
        else:
            # Falsy but not None — presumably an empty/garbled cache slot;
            # TODO(review): confirm against hgtagsfnodescache.getfnode.
            tagsnodedisplay = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4202 4204
4203 4205
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # -r requires a repository even though the command itself does not.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            # An empty key is meaningless and b'ui' is reserved for the
            # template engine itself.
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree and, if aliases changed it, the expanded tree.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4267 4269
4268 4270
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # ui.getpass() may yield None (e.g. no usable input channel); emit a
    # placeholder so the output line stays well-formed.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4283 4285
4284 4286
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Display the prompt, then echo the answer for test scripts to inspect.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4297 4299
4298 4300
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock while caches are
    # refreshed.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4304 4306
4305 4307
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # ``optimize`` defaults to None in the signature (the CLI always passes a
    # list); normalize so a direct API call without the argument does not
    # crash in set().
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize or []), backup=backup, **opts
    )
4355 4357
4356 4358
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    matched = list(repo[None].walk(m))
    if not matched:
        return
    # Honor ui.slash by normalizing relative paths for display.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Column widths sized to the longest absolute and relative paths.
    abswidth = max(len(fname) for fname in matched)
    relwidth = max(len(repo.pathto(fname)) for fname in matched)
    fmt = b'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for fname in matched:
        flag = b'exact' if m.exact(fname) else b''
        line = fmt % (fname, display(repo.pathto(fname)), flag)
        ui.write(b"%s\n" % line.rstrip())
4383 4385
4384 4386
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)".
            rendered = [
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        line = b'%s: %s%s %s\n' % (
            entry[b'instability'],
            dnodes,
            entry[b'reason'],
            entry[b'node'],
        )
        ui.write(line)
4402 4404
4403 4405
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    """echo arguments back through a peer's wire protocol (for debugging)"""
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only this command's own flags
        # with non-empty values are forwarded to the peer.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4434 4436
4435 4437
4436 4438 def _parsewirelangblocks(fh):
4437 4439 activeaction = None
4438 4440 blocklines = []
4439 4441 lastindent = 0
4440 4442
4441 4443 for line in fh:
4442 4444 line = line.rstrip()
4443 4445 if not line:
4444 4446 continue
4445 4447
4446 4448 if line.startswith(b'#'):
4447 4449 continue
4448 4450
4449 4451 if not line.startswith(b' '):
4450 4452 # New block. Flush previous one.
4451 4453 if activeaction:
4452 4454 yield activeaction, blocklines
4453 4455
4454 4456 activeaction = line
4455 4457 blocklines = []
4456 4458 lastindent = 0
4457 4459 continue
4458 4460
4459 4461 # Else we start with an indent.
4460 4462
4461 4463 if not activeaction:
4462 4464 raise error.Abort(_(b'indented line outside of block'))
4463 4465
4464 4466 indent = len(line) - len(line.lstrip())
4465 4467
4466 4468 # If this line is indented more than the last line, concatenate it.
4467 4469 if indent > lastindent and blocklines:
4468 4470 blocklines[-1] += line.lstrip()
4469 4471 else:
4470 4472 blocklines.append(line)
4471 4473 lastindent = indent
4472 4474
4473 4475 # Flush last block.
4474 4476 if activeaction:
4475 4477 yield activeaction, blocklines
4476 4478
4477 4479
4478 4480 @command(
4479 4481 b'debugwireproto',
4480 4482 [
4481 4483 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4482 4484 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4483 4485 (
4484 4486 b'',
4485 4487 b'noreadstderr',
4486 4488 False,
4487 4489 _(b'do not read from stderr of the remote'),
4488 4490 ),
4489 4491 (
4490 4492 b'',
4491 4493 b'nologhandshake',
4492 4494 False,
4493 4495 _(b'do not log I/O related to the peer handshake'),
4494 4496 ),
4495 4497 ]
4496 4498 + cmdutil.remoteopts,
4497 4499 _(b'[PATH]'),
4498 4500 optionalrepo=True,
4499 4501 )
4500 4502 def debugwireproto(ui, repo, path=None, **opts):
4501 4503 """send wire protocol commands to a server
4502 4504
4503 4505 This command can be used to issue wire protocol commands to remote
4504 4506 peers and to debug the raw data being exchanged.
4505 4507
4506 4508 ``--localssh`` will start an SSH server against the current repository
4507 4509 and connect to that. By default, the connection will perform a handshake
4508 4510 and establish an appropriate peer instance.
4509 4511
4510 4512 ``--peer`` can be used to bypass the handshake protocol and construct a
4511 4513 peer instance using the specified class type. Valid values are ``raw``,
4512 4514 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4513 4515 don't support higher-level command actions.
4514 4516
4515 4517 ``--noreadstderr`` can be used to disable automatic reading from stderr
4516 4518 of the peer (for SSH connections only). Disabling automatic reading of
4517 4519 stderr is useful for making output more deterministic.
4518 4520
4519 4521 Commands are issued via a mini language which is specified via stdin.
4520 4522 The language consists of individual actions to perform. An action is
4521 4523 defined by a block. A block is defined as a line with no leading
4522 4524 space followed by 0 or more lines with leading space. Blocks are
4523 4525 effectively a high-level command with additional metadata.
4524 4526
4525 4527 Lines beginning with ``#`` are ignored.
4526 4528
4527 4529 The following sections denote available actions.
4528 4530
4529 4531 raw
4530 4532 ---
4531 4533
4532 4534 Send raw data to the server.
4533 4535
4534 4536 The block payload contains the raw data to send as one atomic send
4535 4537 operation. The data may not actually be delivered in a single system
4536 4538 call: it depends on the abilities of the transport being used.
4537 4539
4538 4540 Each line in the block is de-indented and concatenated. Then, that
4539 4541 value is evaluated as a Python b'' literal. This allows the use of
4540 4542 backslash escaping, etc.
4541 4543
4542 4544 raw+
4543 4545 ----
4544 4546
4545 4547 Behaves like ``raw`` except flushes output afterwards.
4546 4548
4547 4549 command <X>
4548 4550 -----------
4549 4551
4550 4552 Send a request to run a named command, whose name follows the ``command``
4551 4553 string.
4552 4554
4553 4555 Arguments to the command are defined as lines in this block. The format of
4554 4556 each line is ``<key> <value>``. e.g.::
4555 4557
4556 4558 command listkeys
4557 4559 namespace bookmarks
4558 4560
4559 4561 If the value begins with ``eval:``, it will be interpreted as a Python
4560 4562 literal expression. Otherwise values are interpreted as Python b'' literals.
4561 4563 This allows sending complex types and encoding special byte sequences via
4562 4564 backslash escaping.
4563 4565
4564 4566 The following arguments have special meaning:
4565 4567
4566 4568 ``PUSHFILE``
4567 4569 When defined, the *push* mechanism of the peer will be used instead
4568 4570 of the static request-response mechanism and the content of the
4569 4571 file specified in the value of this argument will be sent as the
4570 4572 command payload.
4571 4573
4572 4574 This can be used to submit a local bundle file to the remote.
4573 4575
4574 4576 batchbegin
4575 4577 ----------
4576 4578
4577 4579 Instruct the peer to begin a batched send.
4578 4580
4579 4581 All ``command`` blocks are queued for execution until the next
4580 4582 ``batchsubmit`` block.
4581 4583
4582 4584 batchsubmit
4583 4585 -----------
4584 4586
4585 4587 Submit previously queued ``command`` blocks as a batch request.
4586 4588
4587 4589 This action MUST be paired with a ``batchbegin`` action.
4588 4590
4589 4591 httprequest <method> <path>
4590 4592 ---------------------------
4591 4593
4592 4594 (HTTP peer only)
4593 4595
4594 4596 Send an HTTP request to the peer.
4595 4597
4596 4598 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4597 4599
4598 4600 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4599 4601 headers to add to the request. e.g. ``Accept: foo``.
4600 4602
4601 4603 The following arguments are special:
4602 4604
4603 4605 ``BODYFILE``
4604 4606 The content of the file defined as the value to this argument will be
4605 4607 transferred verbatim as the HTTP request body.
4606 4608
4607 4609 ``frame <type> <flags> <payload>``
4608 4610 Send a unified protocol frame as part of the request body.
4609 4611
4610 4612 All frames will be collected and sent as the body to the HTTP
4611 4613 request.
4612 4614
4613 4615 close
4614 4616 -----
4615 4617
4616 4618 Close the connection to the server.
4617 4619
4618 4620 flush
4619 4621 -----
4620 4622
4621 4623 Flush data written to the server.
4622 4624
4623 4625 readavailable
4624 4626 -------------
4625 4627
4626 4628 Close the write end of the connection and read all available data from
4627 4629 the server.
4628 4630
4629 4631 If the connection to the server encompasses multiple pipes, we poll both
4630 4632 pipes and read available data.
4631 4633
4632 4634 readline
4633 4635 --------
4634 4636
4635 4637 Read a line of output from the server. If there are multiple output
4636 4638 pipes, reads only the main pipe.
4637 4639
4638 4640 ereadline
4639 4641 ---------
4640 4642
4641 4643 Like ``readline``, but read from the stderr pipe, if available.
4642 4644
4643 4645 read <X>
4644 4646 --------
4645 4647
4646 4648 ``read()`` N bytes from the server's main output pipe.
4647 4649
4648 4650 eread <X>
4649 4651 ---------
4650 4652
4651 4653 ``read()`` N bytes from the server's stderr pipe, if available.
4652 4654
4653 4655 Specifying Unified Frame-Based Protocol Frames
4654 4656 ----------------------------------------------
4655 4657
4656 4658 It is possible to emit a *Unified Frame-Based Protocol* by using special
4657 4659 syntax.
4658 4660
4659 4661 A frame is composed as a type, flags, and payload. These can be parsed
4660 4662 from a string of the form:
4661 4663
4662 4664 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4663 4665
4664 4666 ``request-id`` and ``stream-id`` are integers defining the request and
4665 4667 stream identifiers.
4666 4668
4667 4669 ``type`` can be an integer value for the frame type or the string name
4668 4670 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4669 4671 ``command-name``.
4670 4672
4671 4673 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4672 4674 components. Each component (and there can be just one) can be an integer
4673 4675 or a flag name for stream flags or frame flags, respectively. Values are
4674 4676 resolved to integers and then bitwise OR'd together.
4675 4677
4676 4678 ``payload`` represents the raw frame payload. If it begins with
4677 4679 ``cbor:``, the following string is evaluated as Python code and the
4678 4680 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4679 4681 as a Python byte string literal.
4680 4682 """
4681 4683 opts = pycompat.byteskwargs(opts)
4682 4684
4683 4685 if opts[b'localssh'] and not repo:
4684 4686 raise error.Abort(_(b'--localssh requires a repository'))
4685 4687
4686 4688 if opts[b'peer'] and opts[b'peer'] not in (
4687 4689 b'raw',
4688 4690 b'ssh1',
4689 4691 ):
4690 4692 raise error.Abort(
4691 4693 _(b'invalid value for --peer'),
4692 4694 hint=_(b'valid values are "raw" and "ssh1"'),
4693 4695 )
4694 4696
4695 4697 if path and opts[b'localssh']:
4696 4698 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4697 4699
4698 4700 if ui.interactive():
4699 4701 ui.write(_(b'(waiting for commands on stdin)\n'))
4700 4702
4701 4703 blocks = list(_parsewirelangblocks(ui.fin))
4702 4704
4703 4705 proc = None
4704 4706 stdin = None
4705 4707 stdout = None
4706 4708 stderr = None
4707 4709 opener = None
4708 4710
4709 4711 if opts[b'localssh']:
4710 4712 # We start the SSH server in its own process so there is process
4711 4713 # separation. This prevents a whole class of potential bugs around
4712 4714 # shared state from interfering with server operation.
4713 4715 args = procutil.hgcmd() + [
4714 4716 b'-R',
4715 4717 repo.root,
4716 4718 b'debugserve',
4717 4719 b'--sshstdio',
4718 4720 ]
4719 4721 proc = subprocess.Popen(
4720 4722 pycompat.rapply(procutil.tonativestr, args),
4721 4723 stdin=subprocess.PIPE,
4722 4724 stdout=subprocess.PIPE,
4723 4725 stderr=subprocess.PIPE,
4724 4726 bufsize=0,
4725 4727 )
4726 4728
4727 4729 stdin = proc.stdin
4728 4730 stdout = proc.stdout
4729 4731 stderr = proc.stderr
4730 4732
4731 4733 # We turn the pipes into observers so we can log I/O.
4732 4734 if ui.verbose or opts[b'peer'] == b'raw':
4733 4735 stdin = util.makeloggingfileobject(
4734 4736 ui, proc.stdin, b'i', logdata=True
4735 4737 )
4736 4738 stdout = util.makeloggingfileobject(
4737 4739 ui, proc.stdout, b'o', logdata=True
4738 4740 )
4739 4741 stderr = util.makeloggingfileobject(
4740 4742 ui, proc.stderr, b'e', logdata=True
4741 4743 )
4742 4744
4743 4745 # --localssh also implies the peer connection settings.
4744 4746
4745 4747 url = b'ssh://localserver'
4746 4748 autoreadstderr = not opts[b'noreadstderr']
4747 4749
4748 4750 if opts[b'peer'] == b'ssh1':
4749 4751 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4750 4752 peer = sshpeer.sshv1peer(
4751 4753 ui,
4752 4754 url,
4753 4755 proc,
4754 4756 stdin,
4755 4757 stdout,
4756 4758 stderr,
4757 4759 None,
4758 4760 autoreadstderr=autoreadstderr,
4759 4761 )
4760 4762 elif opts[b'peer'] == b'raw':
4761 4763 ui.write(_(b'using raw connection to peer\n'))
4762 4764 peer = None
4763 4765 else:
4764 4766 ui.write(_(b'creating ssh peer from handshake results\n'))
4765 4767 peer = sshpeer.makepeer(
4766 4768 ui,
4767 4769 url,
4768 4770 proc,
4769 4771 stdin,
4770 4772 stdout,
4771 4773 stderr,
4772 4774 autoreadstderr=autoreadstderr,
4773 4775 )
4774 4776
4775 4777 elif path:
4776 4778 # We bypass hg.peer() so we can proxy the sockets.
4777 4779 # TODO consider not doing this because we skip
4778 4780 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4779 4781 u = urlutil.url(path)
4780 4782 if u.scheme != b'http':
4781 4783 raise error.Abort(_(b'only http:// paths are currently supported'))
4782 4784
4783 4785 url, authinfo = u.authinfo()
4784 4786 openerargs = {
4785 4787 'useragent': b'Mercurial debugwireproto',
4786 4788 }
4787 4789
4788 4790 # Turn pipes/sockets into observers so we can log I/O.
4789 4791 if ui.verbose:
4790 4792 openerargs.update(
4791 4793 {
4792 4794 'loggingfh': ui,
4793 4795 'loggingname': b's',
4794 4796 'loggingopts': {
4795 4797 'logdata': True,
4796 4798 'logdataapis': False,
4797 4799 },
4798 4800 }
4799 4801 )
4800 4802
4801 4803 if ui.debugflag:
4802 4804 openerargs['loggingopts']['logdataapis'] = True
4803 4805
4804 4806 # Don't send default headers when in raw mode. This allows us to
4805 4807 # bypass most of the behavior of our URL handling code so we can
4806 4808 # have near complete control over what's sent on the wire.
4807 4809 if opts[b'peer'] == b'raw':
4808 4810 openerargs['sendaccept'] = False
4809 4811
4810 4812 opener = urlmod.opener(ui, authinfo, **openerargs)
4811 4813
4812 4814 if opts[b'peer'] == b'raw':
4813 4815 ui.write(_(b'using raw connection to peer\n'))
4814 4816 peer = None
4815 4817 elif opts[b'peer']:
4816 4818 raise error.Abort(
4817 4819 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4818 4820 )
4819 4821 else:
4820 4822 peer = httppeer.makepeer(ui, path, opener=opener)
4821 4823
4822 4824 # We /could/ populate stdin/stdout with sock.makefile()...
4823 4825 else:
4824 4826 raise error.Abort(_(b'unsupported connection configuration'))
4825 4827
4826 4828 batchedcommands = None
4827 4829
4828 4830 # Now perform actions based on the parsed wire language instructions.
4829 4831 for action, lines in blocks:
4830 4832 if action in (b'raw', b'raw+'):
4831 4833 if not stdin:
4832 4834 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4833 4835
4834 4836 # Concatenate the data together.
4835 4837 data = b''.join(l.lstrip() for l in lines)
4836 4838 data = stringutil.unescapestr(data)
4837 4839 stdin.write(data)
4838 4840
4839 4841 if action == b'raw+':
4840 4842 stdin.flush()
4841 4843 elif action == b'flush':
4842 4844 if not stdin:
4843 4845 raise error.Abort(_(b'cannot call flush on this peer'))
4844 4846 stdin.flush()
4845 4847 elif action.startswith(b'command'):
4846 4848 if not peer:
4847 4849 raise error.Abort(
4848 4850 _(
4849 4851 b'cannot send commands unless peer instance '
4850 4852 b'is available'
4851 4853 )
4852 4854 )
4853 4855
4854 4856 command = action.split(b' ', 1)[1]
4855 4857
4856 4858 args = {}
4857 4859 for line in lines:
4858 4860 # We need to allow empty values.
4859 4861 fields = line.lstrip().split(b' ', 1)
4860 4862 if len(fields) == 1:
4861 4863 key = fields[0]
4862 4864 value = b''
4863 4865 else:
4864 4866 key, value = fields
4865 4867
4866 4868 if value.startswith(b'eval:'):
4867 4869 value = stringutil.evalpythonliteral(value[5:])
4868 4870 else:
4869 4871 value = stringutil.unescapestr(value)
4870 4872
4871 4873 args[key] = value
4872 4874
4873 4875 if batchedcommands is not None:
4874 4876 batchedcommands.append((command, args))
4875 4877 continue
4876 4878
4877 4879 ui.status(_(b'sending %s command\n') % command)
4878 4880
4879 4881 if b'PUSHFILE' in args:
4880 4882 with open(args[b'PUSHFILE'], 'rb') as fh:
4881 4883 del args[b'PUSHFILE']
4882 4884 res, output = peer._callpush(
4883 4885 command, fh, **pycompat.strkwargs(args)
4884 4886 )
4885 4887 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4886 4888 ui.status(
4887 4889 _(b'remote output: %s\n') % stringutil.escapestr(output)
4888 4890 )
4889 4891 else:
4890 4892 with peer.commandexecutor() as e:
4891 4893 res = e.callcommand(command, args).result()
4892 4894
4893 4895 ui.status(
4894 4896 _(b'response: %s\n')
4895 4897 % stringutil.pprint(res, bprefix=True, indent=2)
4896 4898 )
4897 4899
4898 4900 elif action == b'batchbegin':
4899 4901 if batchedcommands is not None:
4900 4902 raise error.Abort(_(b'nested batchbegin not allowed'))
4901 4903
4902 4904 batchedcommands = []
4903 4905 elif action == b'batchsubmit':
4904 4906 # There is a batching API we could go through. But it would be
4905 4907 # difficult to normalize requests into function calls. It is easier
4906 4908 # to bypass this layer and normalize to commands + args.
4907 4909 ui.status(
4908 4910 _(b'sending batch with %d sub-commands\n')
4909 4911 % len(batchedcommands)
4910 4912 )
4911 4913 assert peer is not None
4912 4914 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4913 4915 ui.status(
4914 4916 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4915 4917 )
4916 4918
4917 4919 batchedcommands = None
4918 4920
4919 4921 elif action.startswith(b'httprequest '):
4920 4922 if not opener:
4921 4923 raise error.Abort(
4922 4924 _(b'cannot use httprequest without an HTTP peer')
4923 4925 )
4924 4926
4925 4927 request = action.split(b' ', 2)
4926 4928 if len(request) != 3:
4927 4929 raise error.Abort(
4928 4930 _(
4929 4931 b'invalid httprequest: expected format is '
4930 4932 b'"httprequest <method> <path>'
4931 4933 )
4932 4934 )
4933 4935
4934 4936 method, httppath = request[1:]
4935 4937 headers = {}
4936 4938 body = None
4937 4939 frames = []
4938 4940 for line in lines:
4939 4941 line = line.lstrip()
4940 4942 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4941 4943 if m:
4942 4944 # Headers need to use native strings.
4943 4945 key = pycompat.strurl(m.group(1))
4944 4946 value = pycompat.strurl(m.group(2))
4945 4947 headers[key] = value
4946 4948 continue
4947 4949
4948 4950 if line.startswith(b'BODYFILE '):
4949 4951 with open(line.split(b' ', 1), b'rb') as fh:
4950 4952 body = fh.read()
4951 4953 elif line.startswith(b'frame '):
4952 4954 frame = wireprotoframing.makeframefromhumanstring(
4953 4955 line[len(b'frame ') :]
4954 4956 )
4955 4957
4956 4958 frames.append(frame)
4957 4959 else:
4958 4960 raise error.Abort(
4959 4961 _(b'unknown argument to httprequest: %s') % line
4960 4962 )
4961 4963
4962 4964 url = path + httppath
4963 4965
4964 4966 if frames:
4965 4967 body = b''.join(bytes(f) for f in frames)
4966 4968
4967 4969 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4968 4970
4969 4971 # urllib.Request insists on using has_data() as a proxy for
4970 4972 # determining the request method. Override that to use our
4971 4973 # explicitly requested method.
4972 4974 req.get_method = lambda: pycompat.sysstr(method)
4973 4975
4974 4976 try:
4975 4977 res = opener.open(req)
4976 4978 body = res.read()
4977 4979 except util.urlerr.urlerror as e:
4978 4980 # read() method must be called, but only exists in Python 2
4979 4981 getattr(e, 'read', lambda: None)()
4980 4982 continue
4981 4983
4982 4984 ct = res.headers.get('Content-Type')
4983 4985 if ct == 'application/mercurial-cbor':
4984 4986 ui.write(
4985 4987 _(b'cbor> %s\n')
4986 4988 % stringutil.pprint(
4987 4989 cborutil.decodeall(body), bprefix=True, indent=2
4988 4990 )
4989 4991 )
4990 4992
4991 4993 elif action == b'close':
4992 4994 assert peer is not None
4993 4995 peer.close()
4994 4996 elif action == b'readavailable':
4995 4997 if not stdout or not stderr:
4996 4998 raise error.Abort(
4997 4999 _(b'readavailable not available on this peer')
4998 5000 )
4999 5001
5000 5002 stdin.close()
5001 5003 stdout.read()
5002 5004 stderr.read()
5003 5005
5004 5006 elif action == b'readline':
5005 5007 if not stdout:
5006 5008 raise error.Abort(_(b'readline not available on this peer'))
5007 5009 stdout.readline()
5008 5010 elif action == b'ereadline':
5009 5011 if not stderr:
5010 5012 raise error.Abort(_(b'ereadline not available on this peer'))
5011 5013 stderr.readline()
5012 5014 elif action.startswith(b'read '):
5013 5015 count = int(action.split(b' ', 1)[1])
5014 5016 if not stdout:
5015 5017 raise error.Abort(_(b'read not available on this peer'))
5016 5018 stdout.read(count)
5017 5019 elif action.startswith(b'eread '):
5018 5020 count = int(action.split(b' ', 1)[1])
5019 5021 if not stderr:
5020 5022 raise error.Abort(_(b'eread not available on this peer'))
5021 5023 stderr.read(count)
5022 5024 else:
5023 5025 raise error.Abort(_(b'unknown action: %s') % action)
5024 5026
5025 5027 if batchedcommands is not None:
5026 5028 raise error.Abort(_(b'unclosed "batchbegin" request'))
5027 5029
5028 5030 if peer:
5029 5031 peer.close()
5030 5032
5031 5033 if proc:
5032 5034 proc.kill()
@@ -1,55 +1,139 b''
1 1 # revlogutils/debug.py - utility used for revlog debugging
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 # Copyright 2022 Octobus <contact@octobus.net>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from .. import (
10 10 node as nodemod,
11 11 )
12 12
13 from . import (
14 constants,
15 )
16
17 INDEX_ENTRY_DEBUG_COLUMN = []
18
19 NODE_SIZE = object()
20
21
22 class _column_base:
23 """constains the definition of a revlog column
24
25 name: the column header,
26 value_func: the function called to get a value,
27 size: the width of the column.
28 """
29
30 def __init__(self, name, value_func, size=None):
31 self.name = name
32 self.value_func = value_func
33 if size is not NODE_SIZE:
34 if size is None:
35 size = 8 # arbitrary default
36 size = max(len(name), size)
37 self._size = size
38
39 def get_size(self, node_size):
40 if self._size is NODE_SIZE:
41 return node_size
42 else:
43 return self._size
44
45
def debug_column(name, size=None):
    """decorated function is registered as a column

    name: the name of the column,
    size: the expected size of the column.
    """

    def register(func):
        # wrap the value function in a column definition and record it in
        # the global display registry; the column object replaces the
        # decorated function
        column = _column_base(name=name, value_func=func, size=size)
        INDEX_ENTRY_DEBUG_COLUMN.append(column)
        return column

    return register
63
64
@debug_column(b"rev", size=6)
def _rev(index, rev, entry, hexfn):
    """local revision number of the row"""
    return b"%d" % rev
68
69
@debug_column(b"linkrev", size=6)
def _linkrev(index, rev, entry, hexfn):
    """revision this entry is linked to (read from the index entry)"""
    return b"%d" % entry[constants.ENTRY_LINK_REV]
73
74
@debug_column(b"nodeid", size=NODE_SIZE)
def _nodeid(index, rev, entry, hexfn):
    """node id of the revision, rendered with the caller-supplied hex function"""
    return hexfn(entry[constants.ENTRY_NODE_ID])
78
79
@debug_column(b"p1-nodeid", size=NODE_SIZE)
def _p1_node(index, rev, entry, hexfn):
    """node id of the first parent"""
    # the entry stores the parent as a revision number, so resolve it to a
    # node id through the parent's own index entry
    parent = entry[constants.ENTRY_PARENT_1]
    p_entry = index[parent]
    return hexfn(p_entry[constants.ENTRY_NODE_ID])
85
86
@debug_column(b"p2-nodeid", size=NODE_SIZE)
def _p2_node(index, rev, entry, hexfn):
    """node id of the second parent"""
    # same revision-to-node resolution as for the first parent
    parent = entry[constants.ENTRY_PARENT_2]
    p_entry = index[parent]
    return hexfn(p_entry[constants.ENTRY_NODE_ID])
92
13 93
def debug_index(
    ui,
    repo,
    formatter,
    revlog,
    full_node,
):
    """display index data for a revlog

    One row is emitted per revision, with one field per column registered
    in INDEX_ENTRY_DEBUG_COLUMN.  ``full_node`` selects full vs short hex
    rendering of node ids.
    """
    if full_node:
        hexfn = nodemod.hex
    else:
        hexfn = nodemod.short

    # width of a rendered node id: probe the first revision (all rendered
    # ids share one length); 12 is the fallback for an empty revlog
    idlen = 12
    for i in revlog:
        idlen = len(hexfn(revlog.node(i)))
        break

    fm = formatter

    # header line: every column name right-aligned to its effective width
    header_pieces = []
    for column in INDEX_ENTRY_DEBUG_COLUMN:
        size = column.get_size(idlen)
        name = column.name
        header_pieces.append(name.rjust(size))

    fm.plain(b' '.join(header_pieces) + b'\n')

    index = revlog.index

    for rev in revlog:
        fm.startitem()
        entry = index[rev]
        first = True
        for column in INDEX_ENTRY_DEBUG_COLUMN:
            # single-space separator between columns (not before the first)
            if not first:
                fm.plain(b' ')
            first = False

            size = column.get_size(idlen)
            value = column.value_func(index, rev, entry, hexfn)
            # build a right-aligning printf-style format, e.g. b"%6s"
            display = b"%%%ds" % size
            fm.write(column.name, display, value)
        fm.plain(b'\n')

    fm.end()
General Comments 0
You need to be logged in to leave comments. Login now