debugantivirusrunning: use bytes when opening a vfs file...
Matt Harbison
r52832:8d9767bf default
@@ -1,4769 +1,4769 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import annotations
9 9
10 10 import binascii
11 11 import codecs
12 12 import collections
13 13 import contextlib
14 14 import difflib
15 15 import errno
16 16 import glob
17 17 import operator
18 18 import os
19 19 import platform
20 20 import random
21 21 import re
22 22 import socket
23 23 import ssl
24 24 import stat
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 bundlerepo,
42 42 changegroup,
43 43 cmdutil,
44 44 color,
45 45 context,
46 46 copies,
47 47 dagparser,
48 48 dirstateutils,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filelog,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 manifest,
63 63 mergestate as mergestatemod,
64 64 metadata,
65 65 obsolete,
66 66 obsutil,
67 67 pathutil,
68 68 phases,
69 69 policy,
70 70 pvec,
71 71 pycompat,
72 72 registrar,
73 73 repair,
74 74 repoview,
75 75 requirements,
76 76 revlog,
77 77 revset,
78 78 revsetlang,
79 79 scmutil,
80 80 setdiscovery,
81 81 simplemerge,
82 82 sshpeer,
83 83 sslutil,
84 84 streamclone,
85 85 strip,
86 86 tags as tagsmod,
87 87 templater,
88 88 treediscovery,
89 89 upgrade,
90 90 url as urlmod,
91 91 util,
92 92 verify,
93 93 vfs as vfsmod,
94 94 wireprotoframing,
95 95 wireprotoserver,
96 96 )
97 97 from .interfaces import repository
98 98 from .stabletailgraph import stabletailsort
99 99 from .utils import (
100 100 cborutil,
101 101 compression,
102 102 dateutil,
103 103 procutil,
104 104 stringutil,
105 105 urlutil,
106 106 )
107 107
108 108 from .revlogutils import (
109 109 debug as revlog_debug,
110 110 nodemap,
111 111 rewrite,
112 112 sidedata,
113 113 )
114 114
115 115 release = lockmod.release
116 116
117 117 table = {}
118 118 table.update(strip.command._table)
119 119 command = registrar.command(table)
120 120
121 121
122 122 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
123 123 def debugancestor(ui, repo, *args):
124 124 """find the ancestor revision of two revisions in a given index"""
125 125 if len(args) == 3:
126 126 index, rev1, rev2 = args
127 127 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
128 128 lookup = r.lookup
129 129 elif len(args) == 2:
130 130 if not repo:
131 131 raise error.Abort(
132 132 _(b'there is no Mercurial repository here (.hg not found)')
133 133 )
134 134 rev1, rev2 = args
135 135 r = repo.changelog
136 136 lookup = repo.lookup
137 137 else:
138 138 raise error.Abort(_(b'either two or three arguments required'))
139 139 a = r.ancestor(lookup(rev1), lookup(rev2))
140 140 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
141 141
142 142
143 143 @command(b'debugantivirusrunning', [])
144 144 def debugantivirusrunning(ui, repo):
145 145 """attempt to trigger an antivirus scanner to see if one is active"""
146 - with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
146 + with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
147 147 f.write(
148 148 util.b85decode(
149 149 # This is a base85-armored version of the EICAR test file. See
150 150 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
151 151 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
152 152 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
153 153 )
154 154 )
155 155 # Give an AV engine time to scan the file.
156 156 time.sleep(2)
157 - util.unlink(repo.cachevfs.join('eicar-test-file.com'))
157 + util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
158 158
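# The two changed lines above are the point of this changeset: vfs path
# arguments are bytes in Mercurial. A minimal sketch of the difference,
# assuming a `repo` object is in scope (illustrative only, not part of the
# change):
#
#     repo.cachevfs.join(b'eicar-test-file.com')   # bytes path: correct
#     repo.cachevfs.join('eicar-test-file.com')    # str path: mixes str and
#                                                  # bytes, can raise TypeError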
159 159
160 160 @command(b'debugapplystreamclonebundle', [], b'FILE')
161 161 def debugapplystreamclonebundle(ui, repo, fname):
162 162 """apply a stream clone bundle file"""
163 163 f = hg.openpath(ui, fname)
164 164 gen = exchange.readbundle(ui, f, fname)
165 165 gen.apply(repo)
166 166
167 167
168 168 @command(
169 169 b'debugbuilddag',
170 170 [
171 171 (
172 172 b'm',
173 173 b'mergeable-file',
174 174 None,
175 175 _(b'add single file mergeable changes'),
176 176 ),
177 177 (
178 178 b'o',
179 179 b'overwritten-file',
180 180 None,
181 181 _(b'add single file all revs overwrite'),
182 182 ),
183 183 (b'n', b'new-file', None, _(b'add new file at each rev')),
184 184 (
185 185 b'',
186 186 b'from-existing',
187 187 None,
188 188 _(b'continue from a non-empty repository'),
189 189 ),
190 190 ],
191 191 _(b'[OPTION]... [TEXT]'),
192 192 )
193 193 def debugbuilddag(
194 194 ui,
195 195 repo,
196 196 text=None,
197 197 mergeable_file=False,
198 198 overwritten_file=False,
199 199 new_file=False,
200 200 from_existing=False,
201 201 ):
202 202 """builds a repo with a given DAG from scratch in the current empty repo
203 203
204 204 The description of the DAG is read from stdin if not given on the
205 205 command line.
206 206
207 207 Elements:
208 208
209 209 - "+n" is a linear run of n nodes based on the current default parent
210 210 - "." is a single node based on the current default parent
211 211 - "$" resets the default parent to null (implied at the start);
212 212 otherwise the default parent is always the last node created
213 213 - "<p" sets the default parent to the backref p
214 214 - "*p" is a fork at parent p, which is a backref
215 215 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
216 216 - "/p2" is a merge of the preceding node and p2
217 217 - ":tag" defines a local tag for the preceding node
218 218 - "@branch" sets the named branch for subsequent nodes
219 219 - "#...\\n" is a comment up to the end of the line
220 220
221 221 Whitespace between the above elements is ignored.
222 222
223 223 A backref is either
224 224
225 225 - a number n, which references the node curr-n, where curr is the current
226 226 node, or
227 227 - the name of a local tag you placed earlier using ":tag", or
228 228 - empty to denote the default parent.
229 229
230 230 All string-valued elements are either strictly alphanumeric, or must
231 231 be enclosed in double quotes ("..."), with "\\" as escape character.
232 232 """
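# A minimal sketch of the DAG language described in the docstring above
# (hypothetical input, not taken from the Mercurial test suite). The text
#
#     +2 :fork +3 :head <fork +2 /head
#
# creates two linear nodes and tags the second "fork", adds three more linear
# nodes and tags the tip "head", resets the default parent to "fork", grows a
# two-node side branch from there, and finally merges that branch tip with
# "head". In an empty repository it could be exercised with:
#
#     hg debugbuilddag '+2 :fork +3 :head <fork +2 /head'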
233 233
234 234 if text is None:
235 235 ui.status(_(b"reading DAG from stdin\n"))
236 236 text = ui.fin.read()
237 237
238 238 cl = repo.changelog
239 239 if len(cl) > 0 and not from_existing:
240 240 raise error.Abort(_(b'repository is not empty'))
241 241
242 242 # determine number of revs in DAG
243 243 total = 0
244 244 for type, data in dagparser.parsedag(text):
245 245 if type == b'n':
246 246 total += 1
247 247
248 248 if mergeable_file:
249 249 linesperrev = 2
250 250 # make a file with k lines per rev
251 251 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
252 252 initialmergedlines.append(b"")
253 253
254 254 tags = []
255 255 progress = ui.makeprogress(
256 256 _(b'building'), unit=_(b'revisions'), total=total
257 257 )
258 258 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
259 259 at = -1
260 260 atbranch = b'default'
261 261 nodeids = []
262 262 id = 0
263 263 progress.update(id)
264 264 for type, data in dagparser.parsedag(text):
265 265 if type == b'n':
266 266 ui.note((b'node %s\n' % pycompat.bytestr(data)))
267 267 id, ps = data
268 268
269 269 files = []
270 270 filecontent = {}
271 271
272 272 p2 = None
273 273 if mergeable_file:
274 274 fn = b"mf"
275 275 p1 = repo[ps[0]]
276 276 if len(ps) > 1:
277 277 p2 = repo[ps[1]]
278 278 pa = p1.ancestor(p2)
279 279 base, local, other = [
280 280 x[fn].data() for x in (pa, p1, p2)
281 281 ]
282 282 m3 = simplemerge.Merge3Text(base, local, other)
283 283 ml = [
284 284 l.strip()
285 285 for l in simplemerge.render_minimized(m3)[0]
286 286 ]
287 287 ml.append(b"")
288 288 elif at > 0:
289 289 ml = p1[fn].data().split(b"\n")
290 290 else:
291 291 ml = initialmergedlines
292 292 ml[id * linesperrev] += b" r%i" % id
293 293 mergedtext = b"\n".join(ml)
294 294 files.append(fn)
295 295 filecontent[fn] = mergedtext
296 296
297 297 if overwritten_file:
298 298 fn = b"of"
299 299 files.append(fn)
300 300 filecontent[fn] = b"r%i\n" % id
301 301
302 302 if new_file:
303 303 fn = b"nf%i" % id
304 304 files.append(fn)
305 305 filecontent[fn] = b"r%i\n" % id
306 306 if len(ps) > 1:
307 307 if not p2:
308 308 p2 = repo[ps[1]]
309 309 for fn in p2:
310 310 if fn.startswith(b"nf"):
311 311 files.append(fn)
312 312 filecontent[fn] = p2[fn].data()
313 313
314 314 def fctxfn(repo, cx, path):
315 315 if path in filecontent:
316 316 return context.memfilectx(
317 317 repo, cx, path, filecontent[path]
318 318 )
319 319 return None
320 320
321 321 if len(ps) == 0 or ps[0] < 0:
322 322 pars = [None, None]
323 323 elif len(ps) == 1:
324 324 pars = [nodeids[ps[0]], None]
325 325 else:
326 326 pars = [nodeids[p] for p in ps]
327 327 cx = context.memctx(
328 328 repo,
329 329 pars,
330 330 b"r%i" % id,
331 331 files,
332 332 fctxfn,
333 333 date=(id, 0),
334 334 user=b"debugbuilddag",
335 335 extra={b'branch': atbranch},
336 336 )
337 337 nodeid = repo.commitctx(cx)
338 338 nodeids.append(nodeid)
339 339 at = id
340 340 elif type == b'l':
341 341 id, name = data
342 342 ui.note((b'tag %s\n' % name))
343 343 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
344 344 elif type == b'a':
345 345 ui.note((b'branch %s\n' % data))
346 346 atbranch = data
347 347 progress.update(id)
348 348
349 349 if tags:
350 350 repo.vfs.write(b"localtags", b"".join(tags))
351 351
352 352
353 353 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
354 354 indent_string = b' ' * indent
355 355 if all:
356 356 ui.writenoi18n(
357 357 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
358 358 % indent_string
359 359 )
360 360
361 361 def showchunks(named):
362 362 ui.write(b"\n%s%s\n" % (indent_string, named))
363 363 for deltadata in gen.deltaiter():
364 364 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
365 365 ui.write(
366 366 b"%s%s %s %s %s %s %d\n"
367 367 % (
368 368 indent_string,
369 369 hex(node),
370 370 hex(p1),
371 371 hex(p2),
372 372 hex(cs),
373 373 hex(deltabase),
374 374 len(delta),
375 375 )
376 376 )
377 377
378 378 gen.changelogheader()
379 379 showchunks(b"changelog")
380 380 gen.manifestheader()
381 381 showchunks(b"manifest")
382 382 for chunkdata in iter(gen.filelogheader, {}):
383 383 fname = chunkdata[b'filename']
384 384 showchunks(fname)
385 385 else:
386 386 if isinstance(gen, bundle2.unbundle20):
387 387 raise error.Abort(_(b'use debugbundle2 for this file'))
388 388 gen.changelogheader()
389 389 for deltadata in gen.deltaiter():
390 390 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
391 391 ui.write(b"%s%s\n" % (indent_string, hex(node)))
392 392
393 393
394 394 def _debugobsmarkers(ui, part, indent=0, **opts):
395 395 """display version and markers contained in 'data'"""
396 396 data = part.read()
397 397 indent_string = b' ' * indent
398 398 try:
399 399 version, markers = obsolete._readmarkers(data)
400 400 except error.UnknownVersion as exc:
401 401 msg = b"%sunsupported version: %s (%d bytes)\n"
402 402 msg %= indent_string, exc.version, len(data)
403 403 ui.write(msg)
404 404 else:
405 405 msg = b"%sversion: %d (%d bytes)\n"
406 406 msg %= indent_string, version, len(data)
407 407 ui.write(msg)
408 408 fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
409 409 for rawmarker in sorted(markers):
410 410 m = obsutil.marker(None, rawmarker)
411 411 fm.startitem()
412 412 fm.plain(indent_string)
413 413 cmdutil.showmarker(fm, m)
414 414 fm.end()
415 415
416 416
417 417 def _debugphaseheads(ui, data, indent=0):
418 418 """display the phase heads contained in 'data'"""
419 419 indent_string = b' ' * indent
420 420 headsbyphase = phases.binarydecode(data)
421 421 for phase in phases.allphases:
422 422 for head in headsbyphase[phase]:
423 423 ui.write(indent_string)
424 424 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
425 425
426 426
427 427 def _quasirepr(thing):
428 428 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
429 429 return b'{%s}' % (
430 430 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
431 431 )
432 432 return pycompat.bytestr(repr(thing))
433 433
434 434
435 435 def _debugbundle2(ui, gen, all=None, **opts):
436 436 """lists the contents of a bundle2"""
437 437 if not isinstance(gen, bundle2.unbundle20):
438 438 raise error.Abort(_(b'not a bundle2 file'))
439 439 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
440 440 parttypes = opts.get('part_type', [])
441 441 for part in gen.iterparts():
442 442 if parttypes and part.type not in parttypes:
443 443 continue
444 444 msg = b'%s -- %s (mandatory: %r)\n'
445 445 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
446 446 if part.type == b'changegroup':
447 447 version = part.params.get(b'version', b'01')
448 448 cg = changegroup.getunbundler(version, part, b'UN')
449 449 if not ui.quiet:
450 450 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
451 451 if part.type == b'obsmarkers':
452 452 if not ui.quiet:
453 453 _debugobsmarkers(ui, part, indent=4, **opts)
454 454 if part.type == b'phase-heads':
455 455 if not ui.quiet:
456 456 _debugphaseheads(ui, part, indent=4)
457 457
458 458
459 459 @command(
460 460 b'debugbundle',
461 461 [
462 462 (b'a', b'all', None, _(b'show all details')),
463 463 (b'', b'part-type', [], _(b'show only the named part type')),
464 464 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
465 465 ],
466 466 _(b'FILE'),
467 467 norepo=True,
468 468 )
469 469 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
470 470 """lists the contents of a bundle"""
471 471 with hg.openpath(ui, bundlepath) as f:
472 472 if spec:
473 473 spec = exchange.getbundlespec(ui, f)
474 474 ui.write(b'%s\n' % spec)
475 475 return
476 476
477 477 gen = exchange.readbundle(ui, f, bundlepath)
478 478 if isinstance(gen, bundle2.unbundle20):
479 479 return _debugbundle2(ui, gen, all=all, **opts)
480 480 _debugchangegroup(ui, gen, all=all, **opts)
481 481
482 482
483 483 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
484 484 def debugcapabilities(ui, path, **opts):
485 485 """lists the capabilities of a remote peer"""
486 486 peer = hg.peer(ui, pycompat.byteskwargs(opts), path)
487 487 try:
488 488 caps = peer.capabilities()
489 489 ui.writenoi18n(b'Main capabilities:\n')
490 490 for c in sorted(caps):
491 491 ui.write(b' %s\n' % c)
492 492 b2caps = bundle2.bundle2caps(peer)
493 493 if b2caps:
494 494 ui.writenoi18n(b'Bundle2 capabilities:\n')
495 495 for key, values in sorted(b2caps.items()):
496 496 ui.write(b' %s\n' % key)
497 497 for v in values:
498 498 ui.write(b' %s\n' % v)
499 499 finally:
500 500 peer.close()
501 501
502 502
503 503 @command(
504 504 b'debugchangedfiles',
505 505 [
506 506 (
507 507 b'',
508 508 b'compute',
509 509 False,
510 510 b"compute information instead of reading it from storage",
511 511 ),
512 512 ],
513 513 b'REV',
514 514 )
515 515 def debugchangedfiles(ui, repo, rev, **opts):
516 516 """list the stored files changes for a revision"""
517 517 ctx = logcmdutil.revsingle(repo, rev, None)
518 518 files = None
519 519
520 520 if opts['compute']:
521 521 files = metadata.compute_all_files_changes(ctx)
522 522 else:
523 523 sd = repo.changelog.sidedata(ctx.rev())
524 524 files_block = sd.get(sidedata.SD_FILES)
525 525 if files_block is not None:
526 526 files = metadata.decode_files_sidedata(sd)
527 527 if files is not None:
528 528 for f in sorted(files.touched):
529 529 if f in files.added:
530 530 action = b"added"
531 531 elif f in files.removed:
532 532 action = b"removed"
533 533 elif f in files.merged:
534 534 action = b"merged"
535 535 elif f in files.salvaged:
536 536 action = b"salvaged"
537 537 else:
538 538 action = b"touched"
539 539
540 540 copy_parent = b""
541 541 copy_source = b""
542 542 if f in files.copied_from_p1:
543 543 copy_parent = b"p1"
544 544 copy_source = files.copied_from_p1[f]
545 545 elif f in files.copied_from_p2:
546 546 copy_parent = b"p2"
547 547 copy_source = files.copied_from_p2[f]
548 548
549 549 data = (action, copy_parent, f, copy_source)
550 550 template = b"%-8s %2s: %s, %s;\n"
551 551 ui.write(template % data)
552 552
553 553
554 554 @command(b'debugcheckstate', [], b'')
555 555 def debugcheckstate(ui, repo):
556 556 """validate the correctness of the current dirstate"""
557 557 errors = verify.verifier(repo)._verify_dirstate()
558 558 if errors:
559 559 errstr = _(b"dirstate inconsistent with current parent's manifest")
560 560 raise error.Abort(errstr)
561 561
562 562
563 563 @command(
564 564 b'debugcolor',
565 565 [(b'', b'style', None, _(b'show all configured styles'))],
566 566 b'hg debugcolor',
567 567 )
568 568 def debugcolor(ui, repo, **opts):
569 569 """show available color, effects or style"""
570 570 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
571 571 if opts.get('style'):
572 572 return _debugdisplaystyle(ui)
573 573 else:
574 574 return _debugdisplaycolor(ui)
575 575
576 576
577 577 def _debugdisplaycolor(ui):
578 578 ui = ui.copy()
579 579 ui._styles.clear()
580 580 for effect in color._activeeffects(ui).keys():
581 581 ui._styles[effect] = effect
582 582 if ui._terminfoparams:
583 583 for k, v in ui.configitems(b'color'):
584 584 if k.startswith(b'color.'):
585 585 ui._styles[k] = k[6:]
586 586 elif k.startswith(b'terminfo.'):
587 587 ui._styles[k] = k[9:]
588 588 ui.write(_(b'available colors:\n'))
589 589 # sort label with a '_' after the other to group '_background' entry.
590 590 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
591 591 for colorname, label in items:
592 592 ui.write(b'%s\n' % colorname, label=label)
593 593
594 594
595 595 def _debugdisplaystyle(ui):
596 596 ui.write(_(b'available style:\n'))
597 597 if not ui._styles:
598 598 return
599 599 width = max(len(s) for s in ui._styles)
600 600 for label, effects in sorted(ui._styles.items()):
601 601 ui.write(b'%s' % label, label=label)
602 602 if effects:
603 603 # 50
604 604 ui.write(b': ')
605 605 ui.write(b' ' * (max(0, width - len(label))))
606 606 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
607 607 ui.write(b'\n')
608 608
609 609
610 610 @command(b'debugcreatestreamclonebundle', [], b'FILE')
611 611 def debugcreatestreamclonebundle(ui, repo, fname):
612 612 """create a stream clone bundle file
613 613
614 614 Stream bundles are special bundles that are essentially archives of
615 615 revlog files. They are commonly used for cloning very quickly.
616 616
617 617 This command creates a "version 1" stream clone, which is deprecated in
618 618 favor of newer versions of the stream protocol. Bundles using such newer
619 619 versions can be generated using the `hg bundle` command.
620 620 """
621 621 # TODO we may want to turn this into an abort when this functionality
622 622 # is moved into `hg bundle`.
623 623 if phases.hassecret(repo):
624 624 ui.warn(
625 625 _(
626 626 b'(warning: stream clone bundle will contain secret '
627 627 b'revisions)\n'
628 628 )
629 629 )
630 630
631 631 requirements, gen = streamclone.generatebundlev1(repo)
632 632 changegroup.writechunks(ui, gen, fname)
633 633
634 634 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
635 635
636 636
637 637 @command(
638 638 b'debugdag',
639 639 [
640 640 (b't', b'tags', None, _(b'use tags as labels')),
641 641 (b'b', b'branches', None, _(b'annotate with branch names')),
642 642 (b'', b'dots', None, _(b'use dots for runs')),
643 643 (b's', b'spaces', None, _(b'separate elements by spaces')),
644 644 ],
645 645 _(b'[OPTION]... [FILE [REV]...]'),
646 646 optionalrepo=True,
647 647 )
648 648 def debugdag(ui, repo, file_=None, *revs, **opts):
649 649 """format the changelog or an index DAG as a concise textual description
650 650
651 651 If you pass a revlog index, the revlog's DAG is emitted. If you list
652 652 revision numbers, they get labeled in the output as rN.
653 653
654 654 Otherwise, the changelog DAG of the current repo is emitted.
655 655 """
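# A hypothetical invocation (not from the source): emit the current repo's
# changelog DAG with tags as labels and branch annotations:
#
#     hg debugdag -t -b
#
# The output uses the same element language that `hg debugbuilddag` consumes,
# since both go through dagparser.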
656 656 spaces = opts.get('spaces')
657 657 dots = opts.get('dots')
658 658 if file_:
659 659 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
660 660 revs = {int(r) for r in revs}
661 661
662 662 def events():
663 663 for r in rlog:
664 664 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
665 665 if r in revs:
666 666 yield b'l', (r, b"r%i" % r)
667 667
668 668 elif repo:
669 669 cl = repo.changelog
670 670 tags = opts.get('tags')
671 671 branches = opts.get('branches')
672 672 if tags:
673 673 labels = {}
674 674 for l, n in repo.tags().items():
675 675 labels.setdefault(cl.rev(n), []).append(l)
676 676
677 677 def events():
678 678 b = b"default"
679 679 for r in cl:
680 680 if branches:
681 681 newb = cl.read(cl.node(r))[5][b'branch']
682 682 if newb != b:
683 683 yield b'a', newb
684 684 b = newb
685 685 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
686 686 if tags:
687 687 ls = labels.get(r)
688 688 if ls:
689 689 for l in ls:
690 690 yield b'l', (r, l)
691 691
692 692 else:
693 693 raise error.Abort(_(b'need repo for changelog dag'))
694 694
695 695 for line in dagparser.dagtextlines(
696 696 events(),
697 697 addspaces=spaces,
698 698 wraplabels=True,
699 699 wrapannotations=True,
700 700 wrapnonlinear=dots,
701 701 usedots=dots,
702 702 maxlinewidth=70,
703 703 ):
704 704 ui.write(line)
705 705 ui.write(b"\n")
706 706
707 707
708 708 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
709 709 def debugdata(ui, repo, file_, rev=None, **opts):
710 710 """dump the contents of a data file revision"""
711 711 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
712 712 if rev is not None:
713 713 raise error.InputError(
714 714 _(b'cannot specify a revision with other arguments')
715 715 )
716 716 file_, rev = None, file_
717 717 elif rev is None:
718 718 raise error.InputError(_(b'please specify a revision'))
719 719 r = cmdutil.openstorage(
720 720 repo, b'debugdata', file_, pycompat.byteskwargs(opts)
721 721 )
722 722 try:
723 723 ui.write(r.rawdata(r.lookup(rev)))
724 724 except KeyError:
725 725 raise error.Abort(_(b'invalid revision identifier %s') % rev)
726 726
727 727
728 728 @command(
729 729 b'debugdate',
730 730 [(b'e', b'extended', None, _(b'try extended date formats'))],
731 731 _(b'[-e] DATE [RANGE]'),
732 732 norepo=True,
733 733 optionalrepo=True,
734 734 )
735 735 def debugdate(ui, date, range=None, **opts):
736 736 """parse and display a date"""
737 737 if opts["extended"]:
738 738 d = dateutil.parsedate(date, dateutil.extendeddateformats)
739 739 else:
740 740 d = dateutil.parsedate(date)
741 741 ui.writenoi18n(b"internal: %d %d\n" % d)
742 742 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
743 743 if range:
744 744 m = dateutil.matchdate(range)
745 745 ui.writenoi18n(b"match: %s\n" % m(d[0]))
746 746
747 747
748 748 @command(
749 749 b'debugdeltachain',
750 750 [
751 751 (
752 752 b'r',
753 753 b'rev',
754 754 [],
755 755 _('restrict processing to these revlog revisions'),
756 756 ),
757 757 (
758 758 b'',
759 759 b'all-info',
760 760 False,
761 761 _('compute all information unless specified otherwise'),
762 762 ),
763 763 (
764 764 b'',
765 765 b'size-info',
766 766 None,
767 767 _('compute information related to deltas size'),
768 768 ),
769 769 (
770 770 b'',
771 771 b'dist-info',
772 772 None,
773 773 _('compute information related to base distance'),
774 774 ),
775 775 (
776 776 b'',
777 777 b'sparse-info',
778 778 None,
779 779 _('compute information related to sparse read'),
780 780 ),
781 781 ]
782 782 + cmdutil.debugrevlogopts
783 783 + cmdutil.formatteropts,
784 784 _(b'-c|-m|FILE'),
785 785 optionalrepo=True,
786 786 )
787 787 def debugdeltachain(ui, repo, file_=None, **opts):
788 788 """dump information about delta chains in a revlog
789 789
790 790 Output can be templatized. Available template keywords are:
791 791
792 792 :``rev``: revision number
793 793 :``p1``: parent 1 revision number (for reference)
794 794 :``p2``: parent 2 revision number (for reference)
795 795
796 796 :``chainid``: delta chain identifier (numbered by unique base)
797 797 :``chainlen``: delta chain length to this revision
798 798
799 799 :``prevrev``: previous revision in delta chain
800 800 :``deltatype``: role of delta / how it was computed
801 801 - base: a full snapshot
802 802 - snap: an intermediate snapshot
803 803 - p1: a delta against the first parent
804 804 - p2: a delta against the second parent
805 805 - skip1: a delta against the same base as p1
806 806 (when p1 has an empty delta)
807 807 - skip2: a delta against the same base as p2
808 808 (when p2 has an empty delta)
809 809 - prev: a delta against the previous revision
810 810 - other: a delta against an arbitrary revision
811 811
812 812 :``compsize``: compressed size of revision
813 813 :``uncompsize``: uncompressed size of revision
814 814 :``chainsize``: total size of compressed revisions in chain
815 815 :``chainratio``: total chain size divided by uncompressed revision size
816 816 (new delta chains typically start at ratio 2.00)
817 817
818 818 :``lindist``: linear distance from base revision in delta chain to end
819 819 of this revision
820 820 :``extradist``: total size of revisions not part of this delta chain from
821 821 base of delta chain to end of this revision; a measurement
822 822 of how much extra data we need to read/seek across to read
823 823 the delta chain for this revision
824 824 :``extraratio``: extradist divided by chainsize; another representation of
825 825 how much unrelated data is needed to load this delta chain
826 826
827 827 If the repository is configured to use the sparse read, additional keywords
828 828 are available:
829 829
830 830 :``readsize``: total size of data read from the disk for a revision
831 831 (sum of the sizes of all the blocks)
832 832 :``largestblock``: size of the largest block of data read from the disk
833 833 :``readdensity``: density of useful bytes in the data read from the disk
834 834 :``srchunks``: in how many data hunks the whole revision would be read
835 835
836 836 It is possible to select the information to be computed; this can provide a
837 837 noticeable speedup to the command in some cases.
838 838
839 839 Always computed:
840 840
841 841 - ``rev``
842 842 - ``p1``
843 843 - ``p2``
844 844 - ``chainid``
845 845 - ``chainlen``
846 846 - ``prevrev``
847 847 - ``deltatype``
848 848
849 849 Skipped with --no-size-info
850 850
851 851 - ``compsize``
852 852 - ``uncompsize``
853 853 - ``chainsize``
854 854 - ``chainratio``
855 855
856 856 Skipped with --no-dist-info
857 857
858 858 - ``lindist``
859 859 - ``extradist``
860 860 - ``extraratio``
861 861
862 862 Skipped with --no-sparse-info
863 863
864 864 - ``readsize``
865 865 - ``largestblock``
866 866 - ``readdensity``
867 867 - ``srchunks``
868 868
869 869 --
870 870
871 871 The sparse read can be enabled with experimental.sparse-read = True
872 872 """
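# A hypothetical invocation (not from the source) combining the keywords
# above with the generic formatter options: restrict the computation to two
# manifest revisions and print only chain-related fields:
#
#     hg debugdeltachain -m --rev 0 --rev 1 \
#         -T '{rev} {chainid} {chainlen} {deltatype}\n'
#
# Size- and distance-related keywords are only computed when --size-info,
# --dist-info, or --all-info is passed, matching the per-flag lists above.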
873 873 revs = None
874 874 revs_opt = opts.pop('rev', [])
875 875 if revs_opt:
876 876 revs = [int(r) for r in revs_opt]
877 877
878 878 all_info = opts.pop('all_info', False)
879 879 size_info = opts.pop('size_info', None)
880 880 if size_info is None:
881 881 size_info = all_info
882 882 dist_info = opts.pop('dist_info', None)
883 883 if dist_info is None:
884 884 dist_info = all_info
885 885 sparse_info = opts.pop('sparse_info', None)
886 886 if sparse_info is None:
887 887 sparse_info = all_info
888 888
889 889 revlog = cmdutil.openrevlog(
890 890 repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
891 891 )
892 892 fm = ui.formatter(b'debugdeltachain', pycompat.byteskwargs(opts))
893 893
894 894 lines = revlog_debug.debug_delta_chain(
895 895 revlog,
896 896 revs=revs,
897 897 size_info=size_info,
898 898 dist_info=dist_info,
899 899 sparse_info=sparse_info,
900 900 )
901 901 # first entry is the header
902 902 header = next(lines)
903 903 fm.plain(header)
904 904 for entry in lines:
905 905 label = b' '.join(e[0] for e in entry)
906 906 format = b' '.join(e[1] for e in entry)
907 907 values = [e[3] for e in entry]
908 908 data = dict((e[2], e[3]) for e in entry)
909 909 fm.startitem()
910 910 fm.write(label, format, *values, **data)
911 911 fm.plain(b'\n')
912 912 fm.end()
913 913
914 914
915 915 @command(
916 916 b'debug-delta-find',
917 917 cmdutil.debugrevlogopts
918 918 + cmdutil.formatteropts
919 919 + [
920 920 (
921 921 b'',
922 922 b'source',
923 923 b'full',
924 924 _(b'input data fed to the process (full, storage, p1, p2, prev)'),
925 925 ),
926 926 ],
927 927 _(b'-c|-m|FILE REV'),
928 928 optionalrepo=True,
929 929 )
930 930 def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
931 931 """display the computation to get to a valid delta for storing REV
932 932
933 933 This command will replay the process used to find the "best" delta to store
934 934 a revision and display information about all the steps used to get to that
935 935 result.
936 936
937 937 By default, the process is fed with the full text of the revision. This
938 938 can be controlled with the --source flag.
939 939
940 940 The revision is identified by its revision number in the target storage (not
941 941 its changelog revision number).
942 942
943 943 note: the process is initiated from a full text of the revision to store.
944 944 """
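# A hypothetical invocation (not from the source): replay the delta search
# for manifest revision 5, feeding the process the delta against p1 instead
# of the full text:
#
#     hg debug-delta-find -m 5 --source p1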
945 945 if arg_2 is None:
946 946 file_ = None
947 947 rev = arg_1
948 948 else:
949 949 file_ = arg_1
950 950 rev = arg_2
951 951
952 952 rev = int(rev)
953 953
954 954 revlog = cmdutil.openrevlog(
955 955 repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
956 956 )
957 957 p1r, p2r = revlog.parentrevs(rev)
958 958
959 959 if source == b'full':
960 960 base_rev = nullrev
961 961 elif source == b'storage':
962 962 base_rev = revlog.deltaparent(rev)
963 963 elif source == b'p1':
964 964 base_rev = p1r
965 965 elif source == b'p2':
966 966 base_rev = p2r
967 967 elif source == b'prev':
968 968 base_rev = rev - 1
969 969 else:
970 970 raise error.InputError(b"invalid --source value: %s" % source)
971 971
972 972 revlog_debug.debug_delta_find(ui, revlog, rev, base_rev=base_rev)
973 973
974 974
975 975 @command(
976 976 b'debugdirstate|debugstate',
977 977 [
978 978 (
979 979 b'',
980 980 b'nodates',
981 981 None,
982 982 _(b'do not display the saved mtime (DEPRECATED)'),
983 983 ),
984 984 (b'', b'dates', True, _(b'display the saved mtime')),
985 985 (b'', b'datesort', None, _(b'sort by saved mtime')),
986 986 (
987 987 b'',
988 988 b'docket',
989 989 False,
990 990 _(b'display the docket (metadata file) instead'),
991 991 ),
992 992 (
993 993 b'',
994 994 b'all',
995 995 False,
996 996 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
997 997 ),
998 998 ],
999 999 _(b'[OPTION]...'),
1000 1000 )
1001 1001 def debugstate(ui, repo, **opts):
1002 1002 """show the contents of the current dirstate"""
1003 1003
1004 1004 if opts.get("docket"):
1005 1005 if not repo.dirstate._use_dirstate_v2:
1006 1006 raise error.Abort(_(b'dirstate v1 does not have a docket'))
1007 1007
1008 1008 docket = repo.dirstate._map.docket
1009 1009 (
1010 1010 start_offset,
1011 1011 root_nodes,
1012 1012 nodes_with_entry,
1013 1013 nodes_with_copy,
1014 1014 unused_bytes,
1015 1015 _unused,
1016 1016 ignore_pattern,
1017 1017 ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)
1018 1018
1019 1019 ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
1020 1020 ui.write(_(b"data file uuid: %s\n") % docket.uuid)
1021 1021 ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
1022 1022 ui.write(_(b"number of root nodes: %d\n") % root_nodes)
1023 1023 ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
1024 1024 ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
1025 1025 ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
1026 1026 ui.write(
1027 1027 _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
1028 1028 )
1029 1029 return
1030 1030
1031 1031 nodates = not opts['dates']
1032 1032 if opts.get('nodates') is not None:
1033 1033 nodates = True
1034 1034 datesort = opts.get('datesort')
1035 1035
1036 1036 if datesort:
1037 1037
1038 1038 def keyfunc(entry):
1039 1039 filename, _state, _mode, _size, mtime = entry
1040 1040 return (mtime, filename)
1041 1041
1042 1042 else:
1043 1043 keyfunc = None # sort by filename
1044 1044 entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
1045 1045 entries.sort(key=keyfunc)
1046 1046 for entry in entries:
1047 1047 filename, state, mode, size, mtime = entry
1048 1048 if mtime == -1:
1049 1049 timestr = b'unset '
1050 1050 elif nodates:
1051 1051 timestr = b'set '
1052 1052 else:
1053 1053 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
1054 1054 timestr = encoding.strtolocal(timestr)
1055 1055 if mode & 0o20000:
1056 1056 mode = b'lnk'
1057 1057 else:
1058 1058 mode = b'%3o' % (mode & 0o777 & ~util.umask)
1059 1059 ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
1060 1060 for f in repo.dirstate.copies():
1061 1061 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1062 1062
1063 1063
1064 1064 @command(
1065 1065 b'debugdirstateignorepatternshash',
1066 1066 [],
1067 1067 _(b''),
1068 1068 )
1069 1069 def debugdirstateignorepatternshash(ui, repo, **opts):
1070 1070 """show the hash of ignore patterns stored in dirstate if v2,
1071 1071 or nothing for dirstate-v1
1072 1072 """
1073 1073 if repo.dirstate._use_dirstate_v2:
1074 1074 docket = repo.dirstate._map.docket
1075 1075 hash_len = 20 # 160 bits for SHA-1
1076 1076 hash_bytes = docket.tree_metadata[-hash_len:]
1077 1077 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1078 1078
1079 1079
1080 1080 @command(
1081 1081 b'debugdiscovery',
1082 1082 [
1083 1083 (b'', b'old', None, _(b'use old-style discovery')),
1084 1084 (
1085 1085 b'',
1086 1086 b'nonheads',
1087 1087 None,
1088 1088 _(b'use old-style discovery with non-heads included'),
1089 1089 ),
1090 1090 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1091 1091 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
1092 1092 (
1093 1093 b'',
1094 1094 b'local-as-revs',
1095 1095 b"",
1096 1096 b'treat local as having these revisions only',
1097 1097 ),
1098 1098 (
1099 1099 b'',
1100 1100 b'remote-as-revs',
1101 1101 b"",
1102 1102 b'use local as remote, with only these revisions',
1103 1103 ),
1104 1104 ]
1105 1105 + cmdutil.remoteopts
1106 1106 + cmdutil.formatteropts,
1107 1107 _(b'[--rev REV] [OTHER]'),
1108 1108 )
1109 1109 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1110 1110 """runs the changeset discovery protocol in isolation
1111 1111
1112 1112 The local peer can be "replaced" by a subset of the local repository by
1113 1113 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1114 1114 can be "replaced" by a subset of the local repository using the
1115 1115 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1116 1116 discovery situations.
1117 1117
1118 1118 The following developer-oriented config options are relevant for people playing with this command:
1119 1119
1120 1120 * devel.discovery.exchange-heads=True
1121 1121
1122 1122 If False, the discovery will not start with
1123 1123 remote head fetching and local head querying.
1124 1124
1125 1125 * devel.discovery.grow-sample=True
1126 1126
1127 1127 If False, the sample size used in set discovery will not be increased
1128 1128 through the process
1129 1129
1130 1130 * devel.discovery.grow-sample.dynamic=True
1131 1131
1132 1132 When discovery.grow-sample.dynamic is True (the default), the sample size is
1133 1133 adapted to the shape of the undecided set (it is set to the max of:
1134 1134 <target-size>, len(roots(undecided)), len(heads(undecided))).
1135 1135
1136 1136 * devel.discovery.grow-sample.rate=1.05
1137 1137
1138 1138 The rate at which the sample grows
1139 1139
1140 1140 * devel.discovery.randomize=True
1141 1141
1142 1142 If False, random samplings during discovery are deterministic. It is meant for
1143 1143 integration tests.
1144 1144
1145 1145 * devel.discovery.sample-size=200
1146 1146
1147 1147 Control the initial size of the discovery sample
1148 1148
1149 1149 * devel.discovery.sample-size.initial=100
1150 1150
1151 1151 Control the sample size used for the initial round of discovery
1152 1152 """
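# A hypothetical invocation (not from the source): run discovery against a
# truncated view of the local repository, with the knobs above tightened so
# the sampling rounds are small and reproducible:
#
#     hg debugdiscovery --remote-as-revs '0:20' --rev tip \
#         --config devel.discovery.sample-size=10 \
#         --config devel.discovery.randomize=no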
1153 1153 unfi = repo.unfiltered()
1154 1154
1155 1155 # setup potential extra filtering
1156 1156 local_revs = opts["local_as_revs"]
1157 1157 remote_revs = opts["remote_as_revs"]
1158 1158
1159 1159 # make sure tests are repeatable
1160 1160 random.seed(int(opts['seed']))
1161 1161
1162 1162 if not remote_revs:
1163 1163 path = urlutil.get_unique_pull_path_obj(
1164 1164 b'debugdiscovery', ui, remoteurl
1165 1165 )
1166 1166 branches = (path.branch, [])
1167 1167 remote = hg.peer(repo, pycompat.byteskwargs(opts), path)
1168 1168 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
1169 1169 else:
1170 1170 branches = (None, [])
1171 1171 remote_filtered_revs = logcmdutil.revrange(
1172 1172 unfi, [b"not (::(%s))" % remote_revs]
1173 1173 )
1174 1174 remote_filtered_revs = frozenset(remote_filtered_revs)
1175 1175
1176 1176 def remote_func(x):
1177 1177 return remote_filtered_revs
1178 1178
1179 1179 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1180 1180
1181 1181 remote = repo.peer()
1182 1182 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1183 1183
1184 1184 if local_revs:
1185 1185 local_filtered_revs = logcmdutil.revrange(
1186 1186 unfi, [b"not (::(%s))" % local_revs]
1187 1187 )
1188 1188 local_filtered_revs = frozenset(local_filtered_revs)
1189 1189
1190 1190 def local_func(x):
1191 1191 return local_filtered_revs
1192 1192
1193 1193 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1194 1194 repo = repo.filtered(b'debug-discovery-local-filter')
1195 1195
1196 1196 data = {}
1197 1197 if opts.get('old'):
1198 1198
1199 1199 def doit(pushedrevs, remoteheads, remote=remote):
1200 1200 if not hasattr(remote, 'branches'):
1201 1201 # enable in-client legacy support
1202 1202 remote = localrepo.locallegacypeer(remote.local())
1203 1203 if remote_revs:
1204 1204 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1205 1205 remote._repo = r
1206 1206 common, _in, hds = treediscovery.findcommonincoming(
1207 1207 repo, remote, force=True, audit=data
1208 1208 )
1209 1209 common = set(common)
1210 1210 if not opts.get('nonheads'):
1211 1211 ui.writenoi18n(
1212 1212 b"unpruned common: %s\n"
1213 1213 % b" ".join(sorted(short(n) for n in common))
1214 1214 )
1215 1215
1216 1216 clnode = repo.changelog.node
1217 1217 common = repo.revs(b'heads(::%ln)', common)
1218 1218 common = {clnode(r) for r in common}
1219 1219 return common, hds
1220 1220
1221 1221 else:
1222 1222
1223 1223 def doit(pushedrevs, remoteheads, remote=remote):
1224 1224 nodes = None
1225 1225 if pushedrevs:
1226 1226 revs = logcmdutil.revrange(repo, pushedrevs)
1227 1227 nodes = [repo[r].node() for r in revs]
1228 1228 common, any, hds = setdiscovery.findcommonheads(
1229 1229 ui,
1230 1230 repo,
1231 1231 remote,
1232 1232 ancestorsof=nodes,
1233 1233 audit=data,
1234 1234 abortwhenunrelated=False,
1235 1235 )
1236 1236 return common, hds
1237 1237
1238 1238 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1239 1239 localrevs = opts['rev']
1240 1240
1241 1241 fm = ui.formatter(b'debugdiscovery', pycompat.byteskwargs(opts))
1242 1242 if fm.strict_format:
1243 1243
1244 1244 @contextlib.contextmanager
1245 1245 def may_capture_output():
1246 1246 ui.pushbuffer()
1247 1247 yield
1248 1248 data[b'output'] = ui.popbuffer()
1249 1249
1250 1250 else:
1251 1251 may_capture_output = util.nullcontextmanager
1252 1252 with may_capture_output():
1253 1253 with util.timedcm('debug-discovery') as t:
1254 1254 common, hds = doit(localrevs, remoterevs)
1255 1255
1256 1256 # compute all statistics
1257 1257 if len(common) == 1 and repo.nullid in common:
1258 1258 common = set()
1259 1259 heads_common = set(common)
1260 1260 heads_remote = set(hds)
1261 1261 heads_local = set(repo.heads())
1262 1262 # note: there cannot be a local or remote head that is in common and not
1263 1263 # itself a head of common.
1264 1264 heads_common_local = heads_common & heads_local
1265 1265 heads_common_remote = heads_common & heads_remote
1266 1266 heads_common_both = heads_common & heads_remote & heads_local
1267 1267
1268 1268 all = repo.revs(b'all()')
1269 1269 common = repo.revs(b'::%ln', common)
1270 1270 roots_common = repo.revs(b'roots(::%ld)', common)
1271 1271 missing = repo.revs(b'not ::%ld', common)
1272 1272 heads_missing = repo.revs(b'heads(%ld)', missing)
1273 1273 roots_missing = repo.revs(b'roots(%ld)', missing)
1274 1274 assert len(common) + len(missing) == len(all)
1275 1275
1276 1276 initial_undecided = repo.revs(
1277 1277 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1278 1278 )
1279 1279 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1280 1280 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1281 1281 common_initial_undecided = initial_undecided & common
1282 1282 missing_initial_undecided = initial_undecided & missing
1283 1283
1284 1284 data[b'elapsed'] = t.elapsed
1285 1285 data[b'nb-common-heads'] = len(heads_common)
1286 1286 data[b'nb-common-heads-local'] = len(heads_common_local)
1287 1287 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1288 1288 data[b'nb-common-heads-both'] = len(heads_common_both)
1289 1289 data[b'nb-common-roots'] = len(roots_common)
1290 1290 data[b'nb-head-local'] = len(heads_local)
1291 1291 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1292 1292 data[b'nb-head-remote'] = len(heads_remote)
1293 1293 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1294 1294 heads_common_remote
1295 1295 )
1296 1296 data[b'nb-revs'] = len(all)
1297 1297 data[b'nb-revs-common'] = len(common)
1298 1298 data[b'nb-revs-missing'] = len(missing)
1299 1299 data[b'nb-missing-heads'] = len(heads_missing)
1300 1300 data[b'nb-missing-roots'] = len(roots_missing)
1301 1301 data[b'nb-ini_und'] = len(initial_undecided)
1302 1302 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1303 1303 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1304 1304 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1305 1305 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1306 1306
1307 1307 fm.startitem()
1308 1308 fm.data(**pycompat.strkwargs(data))
1309 1309 # display discovery summary
1310 1310 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1311 1311 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1312 1312 if b'total-round-trips-heads' in data:
1313 1313 fm.plain(
1314 1314 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1315 1315 )
1316 1316 if b'total-round-trips-branches' in data:
1317 1317 fm.plain(
1318 1318 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1319 1319 % data
1320 1320 )
1321 1321 if b'total-round-trips-between' in data:
1322 1322 fm.plain(
1323 1323 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1324 1324 )
1325 1325 fm.plain(b"queries: %(total-queries)9d\n" % data)
1326 1326 if b'total-queries-branches' in data:
1327 1327 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1328 1328 if b'total-queries-between' in data:
1329 1329 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1330 1330 fm.plain(b"heads summary:\n")
1331 1331 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1332 1332 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1333 1333 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1334 1334 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1335 1335 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1336 1336 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1337 1337 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1338 1338 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1339 1339 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1340 1340 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1341 1341 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1342 1342 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1343 1343 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1344 1344 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1345 1345 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1346 1346 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1347 1347 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1348 1348 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1349 1349 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1350 1350 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1351 1351 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1352 1352 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1353 1353
1354 1354 if ui.verbose:
1355 1355 fm.plain(
1356 1356 b"common heads: %s\n"
1357 1357 % b" ".join(sorted(short(n) for n in heads_common))
1358 1358 )
1359 1359 fm.end()
1360 1360
1361 1361
1362 1362 _chunksize = 4 << 10
1363 1363
1364 1364
1365 1365 @command(
1366 1366 b'debugdownload',
1367 1367 [
1368 1368 (b'o', b'output', b'', _(b'path')),
1369 1369 ],
1370 1370 optionalrepo=True,
1371 1371 )
1372 1372 def debugdownload(ui, repo, url, output=None, **opts):
1373 1373 """download a resource using Mercurial logic and config"""
1374 1374 fh = urlmod.open(ui, url, output)
1375 1375
1376 1376 dest = ui
1377 1377 if output:
1378 1378 dest = open(output, b"wb", _chunksize)
1379 1379 try:
1380 1380 data = fh.read(_chunksize)
1381 1381 while data:
1382 1382 dest.write(data)
1383 1383 data = fh.read(_chunksize)
1384 1384 finally:
1385 1385 if output:
1386 1386 dest.close()
1387 1387
1388 1388
1389 1389 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1390 1390 def debugextensions(ui, repo, **opts):
1391 1391 '''show information about active extensions'''
1392 1392 exts = extensions.extensions(ui)
1393 1393 hgver = util.version()
1394 1394 fm = ui.formatter(b'debugextensions', pycompat.byteskwargs(opts))
1395 1395 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1396 1396 isinternal = extensions.ismoduleinternal(extmod)
1397 1397 extsource = None
1398 1398
1399 1399 if hasattr(extmod, '__file__'):
1400 1400 extsource = pycompat.fsencode(extmod.__file__)
1401 1401 elif getattr(sys, 'oxidized', False):
1402 1402 extsource = pycompat.sysexecutable
1403 1403 if isinternal:
1404 1404 exttestedwith = [] # never expose magic string to users
1405 1405 else:
1406 1406 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1407 1407 extbuglink = getattr(extmod, 'buglink', None)
1408 1408
1409 1409 fm.startitem()
1410 1410
1411 1411 if ui.quiet or ui.verbose:
1412 1412 fm.write(b'name', b'%s\n', extname)
1413 1413 else:
1414 1414 fm.write(b'name', b'%s', extname)
1415 1415 if isinternal or hgver in exttestedwith:
1416 1416 fm.plain(b'\n')
1417 1417 elif not exttestedwith:
1418 1418 fm.plain(_(b' (untested!)\n'))
1419 1419 else:
1420 1420 lasttestedversion = exttestedwith[-1]
1421 1421 fm.plain(b' (%s!)\n' % lasttestedversion)
1422 1422
1423 1423 fm.condwrite(
1424 1424 ui.verbose and extsource,
1425 1425 b'source',
1426 1426 _(b' location: %s\n'),
1427 1427 extsource or b"",
1428 1428 )
1429 1429
1430 1430 if ui.verbose:
1431 1431 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1432 1432 fm.data(bundled=isinternal)
1433 1433
1434 1434 fm.condwrite(
1435 1435 ui.verbose and exttestedwith,
1436 1436 b'testedwith',
1437 1437 _(b' tested with: %s\n'),
1438 1438 fm.formatlist(exttestedwith, name=b'ver'),
1439 1439 )
1440 1440
1441 1441 fm.condwrite(
1442 1442 ui.verbose and extbuglink,
1443 1443 b'buglink',
1444 1444 _(b' bug reporting: %s\n'),
1445 1445 extbuglink or b"",
1446 1446 )
1447 1447
1448 1448 fm.end()
1449 1449
1450 1450
1451 1451 @command(
1452 1452 b'debugfileset',
1453 1453 [
1454 1454 (
1455 1455 b'r',
1456 1456 b'rev',
1457 1457 b'',
1458 1458 _(b'apply the filespec on this revision'),
1459 1459 _(b'REV'),
1460 1460 ),
1461 1461 (
1462 1462 b'',
1463 1463 b'all-files',
1464 1464 False,
1465 1465 _(b'test files from all revisions and working directory'),
1466 1466 ),
1467 1467 (
1468 1468 b's',
1469 1469 b'show-matcher',
1470 1470 None,
1471 1471 _(b'print internal representation of matcher'),
1472 1472 ),
1473 1473 (
1474 1474 b'p',
1475 1475 b'show-stage',
1476 1476 [],
1477 1477 _(b'print parsed tree at the given stage'),
1478 1478 _(b'NAME'),
1479 1479 ),
1480 1480 ],
1481 1481 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1482 1482 )
1483 1483 def debugfileset(ui, repo, expr, **opts):
1484 1484 '''parse and apply a fileset specification'''
1485 1485 from . import fileset
1486 1486
1487 1487 fileset.symbols # force import of fileset so we have predicates to optimize
1488 1488
1489 1489 ctx = logcmdutil.revsingle(repo, opts.get('rev'), None)
1490 1490
1491 1491 stages = [
1492 1492 (b'parsed', pycompat.identity),
1493 1493 (b'analyzed', filesetlang.analyze),
1494 1494 (b'optimized', filesetlang.optimize),
1495 1495 ]
1496 1496 stagenames = {n for n, f in stages}
1497 1497
1498 1498 showalways = set()
1499 1499 if ui.verbose and not opts['show_stage']:
1500 1500 # show parsed tree by --verbose (deprecated)
1501 1501 showalways.add(b'parsed')
1502 1502 if opts['show_stage'] == [b'all']:
1503 1503 showalways.update(stagenames)
1504 1504 else:
1505 1505 for n in opts['show_stage']:
1506 1506 if n not in stagenames:
1507 1507 raise error.Abort(_(b'invalid stage name: %s') % n)
1508 1508 showalways.update(opts['show_stage'])
1509 1509
1510 1510 tree = filesetlang.parse(expr)
1511 1511 for n, f in stages:
1512 1512 tree = f(tree)
1513 1513 if n in showalways:
1514 1514 if opts['show_stage'] or n != b'parsed':
1515 1515 ui.write(b"* %s:\n" % n)
1516 1516 ui.write(filesetlang.prettyformat(tree), b"\n")
1517 1517
1518 1518 files = set()
1519 1519 if opts['all_files']:
1520 1520 for r in repo:
1521 1521 c = repo[r]
1522 1522 files.update(c.files())
1523 1523 files.update(c.substate)
1524 1524 if opts['all_files'] or ctx.rev() is None:
1525 1525 wctx = repo[None]
1526 1526 files.update(
1527 1527 repo.dirstate.walk(
1528 1528 scmutil.matchall(repo),
1529 1529 subrepos=list(wctx.substate),
1530 1530 unknown=True,
1531 1531 ignored=True,
1532 1532 )
1533 1533 )
1534 1534 files.update(wctx.substate)
1535 1535 else:
1536 1536 files.update(ctx.files())
1537 1537 files.update(ctx.substate)
1538 1538
1539 1539 m = ctx.matchfileset(repo.getcwd(), expr)
1540 1540 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
1541 1541 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1542 1542 for f in sorted(files):
1543 1543 if not m(f):
1544 1544 continue
1545 1545 ui.write(b"%s\n" % f)
1546 1546
1547 1547
1548 1548 @command(
1549 1549 b"debug-repair-issue6528",
1550 1550 [
1551 1551 (
1552 1552 b'',
1553 1553 b'to-report',
1554 1554 b'',
1555 1555 _(b'build a report of affected revisions to this file'),
1556 1556 _(b'FILE'),
1557 1557 ),
1558 1558 (
1559 1559 b'',
1560 1560 b'from-report',
1561 1561 b'',
1562 1562 _(b'repair revisions listed in this report file'),
1563 1563 _(b'FILE'),
1564 1564 ),
1565 1565 (
1566 1566 b'',
1567 1567 b'paranoid',
1568 1568 False,
1569 1569 _(b'check that both detection methods do the same thing'),
1570 1570 ),
1571 1571 ]
1572 1572 + cmdutil.dryrunopts,
1573 1573 )
1574 1574 def debug_repair_issue6528(ui, repo, **opts):
1575 1575 """find affected revisions and repair them. See issue6528 for more details.
1576 1576
1577 1577 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1578 1578 computation of affected revisions for a given repository across clones.
1579 1579 The report format is line-based (with empty lines ignored):
1580 1580
1581 1581 ```
1582 1582 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1583 1583 ```
1584 1584
1585 1585 There can be multiple broken revisions per filelog, they are separated by
1586 1586 a comma with no spaces. The only space is between the revision(s) and the
1587 1587 filename.
1588 1588
1589 1589 Note that this does *not* mean that this repairs future affected revisions;
1590 1590 that needs a separate fix at the exchange level that was introduced in
1591 1591 Mercurial 5.9.1.
1592 1592
1593 1593 There is a `--paranoid` flag to test that the fast implementation is correct
1594 1594 by checking it against the slow implementation. Since this matter is quite
1595 1595 urgent and testing every edge-case is probably quite costly, we use this
1596 1596 method to test on large repositories as a fuzzing method of sorts.
1597 1597 """
1598 1598 cmdutil.check_incompatible_arguments(
1599 1599 opts, 'to_report', ['from_report', 'dry_run']
1600 1600 )
1601 1601 dry_run = opts.get('dry_run')
1602 1602 to_report = opts.get('to_report')
1603 1603 from_report = opts.get('from_report')
1604 1604 paranoid = opts.get('paranoid')
1605 1605 # TODO maybe add filelog pattern and revision pattern parameters to help
1606 1606 # narrow down the search for users that know what they're looking for?
1607 1607
1608 1608 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1609 1609 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1610 1610 raise error.Abort(_(msg))
1611 1611
1612 1612 rewrite.repair_issue6528(
1613 1613 ui,
1614 1614 repo,
1615 1615 dry_run=dry_run,
1616 1616 to_report=to_report,
1617 1617 from_report=from_report,
1618 1618 paranoid=paranoid,
1619 1619 )
1620 1620
1621 1621
1622 1622 @command(b'debugformat', [] + cmdutil.formatteropts)
1623 1623 def debugformat(ui, repo, **opts):
1624 1624 """display format information about the current repository
1625 1625
1626 1626 Use --verbose to get extra information about current config value and
1627 1627 Mercurial default."""
1628 1628 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1629 1629 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1630 1630
1631 1631 def makeformatname(name):
1632 1632 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1633 1633
1634 1634 fm = ui.formatter(b'debugformat', pycompat.byteskwargs(opts))
1635 1635 if fm.isplain():
1636 1636
1637 1637 def formatvalue(value):
1638 1638 if hasattr(value, 'startswith'):
1639 1639 return value
1640 1640 if value:
1641 1641 return b'yes'
1642 1642 else:
1643 1643 return b'no'
1644 1644
1645 1645 else:
1646 1646 formatvalue = pycompat.identity
1647 1647
1648 1648 fm.plain(b'format-variant')
1649 1649 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1650 1650 fm.plain(b' repo')
1651 1651 if ui.verbose:
1652 1652 fm.plain(b' config default')
1653 1653 fm.plain(b'\n')
1654 1654 for fv in upgrade.allformatvariant:
1655 1655 fm.startitem()
1656 1656 repovalue = fv.fromrepo(repo)
1657 1657 configvalue = fv.fromconfig(repo)
1658 1658
1659 1659 if repovalue != configvalue:
1660 1660 namelabel = b'formatvariant.name.mismatchconfig'
1661 1661 repolabel = b'formatvariant.repo.mismatchconfig'
1662 1662 elif repovalue != fv.default:
1663 1663 namelabel = b'formatvariant.name.mismatchdefault'
1664 1664 repolabel = b'formatvariant.repo.mismatchdefault'
1665 1665 else:
1666 1666 namelabel = b'formatvariant.name.uptodate'
1667 1667 repolabel = b'formatvariant.repo.uptodate'
1668 1668
1669 1669 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1670 1670 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1671 1671 if fv.default != configvalue:
1672 1672 configlabel = b'formatvariant.config.special'
1673 1673 else:
1674 1674 configlabel = b'formatvariant.config.default'
1675 1675 fm.condwrite(
1676 1676 ui.verbose,
1677 1677 b'config',
1678 1678 b' %6s',
1679 1679 formatvalue(configvalue),
1680 1680 label=configlabel,
1681 1681 )
1682 1682 fm.condwrite(
1683 1683 ui.verbose,
1684 1684 b'default',
1685 1685 b' %7s',
1686 1686 formatvalue(fv.default),
1687 1687 label=b'formatvariant.default',
1688 1688 )
1689 1689 fm.plain(b'\n')
1690 1690 fm.end()
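# Rough shape of the plain-formatter output produced above (illustrative
# values; the "config" and "default" columns only appear with --verbose):
#
#   format-variant     repo config default
#   fncache:            yes    yes     yes
#   dirstate-v2:         no     no      no
#
# Booleans are rendered as yes/no by formatvalue(); string-valued variants
# are printed verbatim.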
1691 1691
1692 1692
1693 1693 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1694 1694 def debugfsinfo(ui, path=b"."):
1695 1695 """show information detected about current filesystem"""
1696 1696 ui.writenoi18n(b'path: %s\n' % path)
1697 1697 ui.writenoi18n(
1698 1698 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1699 1699 )
1700 1700 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1701 1701 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1702 1702 ui.writenoi18n(
1703 1703 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1704 1704 )
1705 1705 ui.writenoi18n(
1706 1706 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1707 1707 )
1708 1708 casesensitive = b'(unknown)'
1709 1709 try:
1710 1710 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1711 1711 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1712 1712 except OSError:
1713 1713 pass
1714 1714 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1715 1715
1716 1716
1717 1717 @command(
1718 1718 b'debuggetbundle',
1719 1719 [
1720 1720 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1721 1721 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1722 1722 (
1723 1723 b't',
1724 1724 b'type',
1725 1725 b'bzip2',
1726 1726 _(b'bundle compression type to use'),
1727 1727 _(b'TYPE'),
1728 1728 ),
1729 1729 ],
1730 1730 _(b'REPO FILE [-H|-C ID]...'),
1731 1731 norepo=True,
1732 1732 )
1733 1733 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1734 1734 """retrieves a bundle from a repo
1735 1735
1736 1736 Every ID must be a full-length hex node id string. Saves the bundle to the
1737 1737 given file.
1738 1738 """
1739 1739 repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
1740 1740 if not repo.capable(b'getbundle'):
1741 1741 raise error.Abort(b"getbundle() not supported by target repository")
1742 1742 args = {}
1743 1743 if common:
1744 1744 args['common'] = [bin(s) for s in common]
1745 1745 if head:
1746 1746 args['heads'] = [bin(s) for s in head]
1747 1747 # TODO: get desired bundlecaps from command line.
1748 1748 args['bundlecaps'] = None
1749 1749 bundle = repo.getbundle(b'debug', **args)
1750 1750
1751 1751 bundletype = opts.get('type', b'bzip2').lower()
1752 1752 btypes = {
1753 1753 b'none': b'HG10UN',
1754 1754 b'bzip2': b'HG10BZ',
1755 1755 b'gzip': b'HG10GZ',
1756 1756 b'bundle2': b'HG20',
1757 1757 }
1758 1758 bundletype = btypes.get(bundletype)
1759 1759 if bundletype not in bundle2.bundletypes:
1760 1760 raise error.Abort(_(b'unknown bundle type specified with --type'))
1761 1761 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1762 1762
1763 1763
1764 1764 @command(b'debugignore', [], b'[FILE]...')
1765 1765 def debugignore(ui, repo, *files, **opts):
1766 1766 """display the combined ignore pattern and information about ignored files
1767 1767
1768 1768 With no argument display the combined ignore pattern.
1769 1769
1770 1770 Given space-separated file names, shows whether each given file is ignored
1771 1771 and, if so, shows the ignore rule (file and line number) that matched it.
1772 1772 """
1773 1773 ignore = repo.dirstate._ignore
1774 1774 if not files:
1775 1775 # Show all the patterns
1776 1776 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1777 1777 else:
1778 1778 m = scmutil.match(repo[None], pats=files)
1779 1779 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1780 1780 for f in m.files():
1781 1781 nf = util.normpath(f)
1782 1782 ignored = None
1783 1783 ignoredata = None
1784 1784 if nf != b'.':
1785 1785 if ignore(nf):
1786 1786 ignored = nf
1787 1787 ignoredata = repo.dirstate._ignorefileandline(nf)
1788 1788 else:
1789 1789 for p in pathutil.finddirs(nf):
1790 1790 if ignore(p):
1791 1791 ignored = p
1792 1792 ignoredata = repo.dirstate._ignorefileandline(p)
1793 1793 break
1794 1794 if ignored:
1795 1795 if ignored == nf:
1796 1796 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1797 1797 else:
1798 1798 ui.write(
1799 1799 _(
1800 1800 b"%s is ignored because of "
1801 1801 b"containing directory %s\n"
1802 1802 )
1803 1803 % (uipathfn(f), ignored)
1804 1804 )
1805 1805 ignorefile, lineno, line = ignoredata
1806 1806 ui.write(
1807 1807 _(b"(ignore rule in %s, line %d: '%s')\n")
1808 1808 % (ignorefile, lineno, line)
1809 1809 )
1810 1810 else:
1811 1811 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1812 1812
1813 1813
1814 1814 @command(
1815 1815 b'debug-revlog-index|debugindex',
1816 1816 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1817 1817 _(b'-c|-m|FILE'),
1818 1818 )
1819 1819 def debugindex(ui, repo, file_=None, **opts):
1820 1820 """dump index data for a revlog"""
1821 1821 opts = pycompat.byteskwargs(opts)
1822 1822 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1823 1823
1824 1824 fm = ui.formatter(b'debugindex', opts)
1825 1825
1826 1826 revlog = getattr(store, '_revlog', store)
1827 1827
1828 1828 return revlog_debug.debug_index(
1829 1829 ui,
1830 1830 repo,
1831 1831 formatter=fm,
1832 1832 revlog=revlog,
1833 1833 full_node=ui.debugflag,
1834 1834 )
1835 1835
1836 1836
1837 1837 @command(
1838 1838 b'debugindexdot',
1839 1839 cmdutil.debugrevlogopts,
1840 1840 _(b'-c|-m|FILE'),
1841 1841 optionalrepo=True,
1842 1842 )
1843 1843 def debugindexdot(ui, repo, file_=None, **opts):
1844 1844 """dump an index DAG as a graphviz dot file"""
1845 1845 r = cmdutil.openstorage(
1846 1846 repo, b'debugindexdot', file_, pycompat.byteskwargs(opts)
1847 1847 )
1848 1848 ui.writenoi18n(b"digraph G {\n")
1849 1849 for i in r:
1850 1850 node = r.node(i)
1851 1851 pp = r.parents(node)
1852 1852 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1853 1853 if pp[1] != repo.nullid:
1854 1854 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1855 1855 ui.write(b"}\n")
1856 1856
1857 1857
1858 1858 @command(b'debugindexstats', [])
1859 1859 def debugindexstats(ui, repo):
1860 1860 """show stats related to the changelog index"""
1861 1861 repo.changelog.shortest(repo.nullid, 1)
1862 1862 index = repo.changelog.index
1863 1863 if not hasattr(index, 'stats'):
1864 1864 raise error.Abort(_(b'debugindexstats only works with native C code'))
1865 1865 for k, v in sorted(index.stats().items()):
1866 1866 ui.write(b'%s: %d\n' % (k, v))
1867 1867
1868 1868
1869 1869 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1870 1870 def debuginstall(ui, **opts):
1871 1871 """test Mercurial installation
1872 1872
1873 1873 Returns 0 on success.
1874 1874 """
1875 1875 problems = 0
1876 1876
1877 1877 fm = ui.formatter(b'debuginstall', pycompat.byteskwargs(opts))
1878 1878 fm.startitem()
1879 1879
1880 1880 # encoding might be unknown or wrong. don't translate these messages.
1881 1881 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1882 1882 err = None
1883 1883 try:
1884 1884 codecs.lookup(pycompat.sysstr(encoding.encoding))
1885 1885 except LookupError as inst:
1886 1886 err = stringutil.forcebytestr(inst)
1887 1887 problems += 1
1888 1888 fm.condwrite(
1889 1889 err,
1890 1890 b'encodingerror',
1891 1891 b" %s\n (check that your locale is properly set)\n",
1892 1892 err,
1893 1893 )
1894 1894
1895 1895 # Python
1896 1896 pythonlib = None
1897 1897 if hasattr(os, '__file__'):
1898 1898 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1899 1899 elif getattr(sys, 'oxidized', False):
1900 1900 pythonlib = pycompat.sysexecutable
1901 1901
1902 1902 fm.write(
1903 1903 b'pythonexe',
1904 1904 _(b"checking Python executable (%s)\n"),
1905 1905 pycompat.sysexecutable or _(b"unknown"),
1906 1906 )
1907 1907 fm.write(
1908 1908 b'pythonimplementation',
1909 1909 _(b"checking Python implementation (%s)\n"),
1910 1910 pycompat.sysbytes(platform.python_implementation()),
1911 1911 )
1912 1912 fm.write(
1913 1913 b'pythonver',
1914 1914 _(b"checking Python version (%s)\n"),
1915 1915 (b"%d.%d.%d" % sys.version_info[:3]),
1916 1916 )
1917 1917 fm.write(
1918 1918 b'pythonlib',
1919 1919 _(b"checking Python lib (%s)...\n"),
1920 1920 pythonlib or _(b"unknown"),
1921 1921 )
1922 1922
1923 1923 try:
1924 1924 from . import rustext # pytype: disable=import-error
1925 1925
1926 1926 rustext.__doc__ # trigger lazy import
1927 1927 except ImportError:
1928 1928 rustext = None
1929 1929
1930 1930 security = set(sslutil.supportedprotocols)
1931 1931 if sslutil.hassni:
1932 1932 security.add(b'sni')
1933 1933
1934 1934 fm.write(
1935 1935 b'pythonsecurity',
1936 1936 _(b"checking Python security support (%s)\n"),
1937 1937 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1938 1938 )
1939 1939
1940 1940 # These are warnings, not errors. So don't increment problem count. This
1941 1941 # may change in the future.
1942 1942 if b'tls1.2' not in security:
1943 1943 fm.plain(
1944 1944 _(
1945 1945 b' TLS 1.2 not supported by Python install; '
1946 1946 b'network connections lack modern security\n'
1947 1947 )
1948 1948 )
1949 1949 if b'sni' not in security:
1950 1950 fm.plain(
1951 1951 _(
1952 1952 b' SNI not supported by Python install; may have '
1953 1953 b'connectivity issues with some servers\n'
1954 1954 )
1955 1955 )
1956 1956
1957 1957 fm.plain(
1958 1958 _(
1959 1959 b"checking Rust extensions (%s)\n"
1960 1960 % (b'missing' if rustext is None else b'installed')
1961 1961 ),
1962 1962 )
1963 1963
1964 1964 # TODO print CA cert info
1965 1965
1966 1966 # hg version
1967 1967 hgver = util.version()
1968 1968 fm.write(
1969 1969 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1970 1970 )
1971 1971 fm.write(
1972 1972 b'hgverextra',
1973 1973 _(b"checking Mercurial custom build (%s)\n"),
1974 1974 b'+'.join(hgver.split(b'+')[1:]),
1975 1975 )
1976 1976
1977 1977 # compiled modules
1978 1978 hgmodules = None
1979 1979 if hasattr(sys.modules[__name__], '__file__'):
1980 1980 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1981 1981 elif getattr(sys, 'oxidized', False):
1982 1982 hgmodules = pycompat.sysexecutable
1983 1983
1984 1984 fm.write(
1985 1985 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1986 1986 )
1987 1987 fm.write(
1988 1988 b'hgmodules',
1989 1989 _(b"checking installed modules (%s)...\n"),
1990 1990 hgmodules or _(b"unknown"),
1991 1991 )
1992 1992
1993 1993 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1994 1994 rustext = rustandc # for now, that's the only case
1995 1995 cext = policy.policy in (b'c', b'allow') or rustandc
1996 1996 nopure = cext or rustext
1997 1997 if nopure:
1998 1998 err = None
1999 1999 try:
2000 2000 if cext:
2001 2001 from .cext import ( # pytype: disable=import-error
2002 2002 base85,
2003 2003 bdiff,
2004 2004 mpatch,
2005 2005 osutil,
2006 2006 )
2007 2007
2008 2008 # quiet pyflakes
2009 2009 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2010 2010 if rustext:
2011 2011 from .rustext import ( # pytype: disable=import-error
2012 2012 ancestor,
2013 2013 dirstate,
2014 2014 )
2015 2015
2016 2016 dir(ancestor), dir(dirstate) # quiet pyflakes
2017 2017 except Exception as inst:
2018 2018 err = stringutil.forcebytestr(inst)
2019 2019 problems += 1
2020 2020 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2021 2021
2022 2022 compengines = util.compengines._engines.values()
2023 2023 fm.write(
2024 2024 b'compengines',
2025 2025 _(b'checking registered compression engines (%s)\n'),
2026 2026 fm.formatlist(
2027 2027 sorted(e.name() for e in compengines),
2028 2028 name=b'compengine',
2029 2029 fmt=b'%s',
2030 2030 sep=b', ',
2031 2031 ),
2032 2032 )
2033 2033 fm.write(
2034 2034 b'compenginesavail',
2035 2035 _(b'checking available compression engines (%s)\n'),
2036 2036 fm.formatlist(
2037 2037 sorted(e.name() for e in compengines if e.available()),
2038 2038 name=b'compengine',
2039 2039 fmt=b'%s',
2040 2040 sep=b', ',
2041 2041 ),
2042 2042 )
2043 2043 wirecompengines = compression.compengines.supportedwireengines(
2044 2044 compression.SERVERROLE
2045 2045 )
2046 2046 fm.write(
2047 2047 b'compenginesserver',
2048 2048 _(
2049 2049 b'checking available compression engines '
2050 2050 b'for wire protocol (%s)\n'
2051 2051 ),
2052 2052 fm.formatlist(
2053 2053 [e.name() for e in wirecompengines if e.wireprotosupport()],
2054 2054 name=b'compengine',
2055 2055 fmt=b'%s',
2056 2056 sep=b', ',
2057 2057 ),
2058 2058 )
2059 2059 re2 = b'missing'
2060 2060 if util.has_re2():
2061 2061 re2 = b'available'
2062 2062 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2063 2063 fm.data(re2=bool(util._re2))
2064 2064
2065 2065 # templates
2066 2066 p = templater.templatedir()
2067 2067 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2068 2068 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2069 2069 if p:
2070 2070 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2071 2071 if m:
2072 2072 # template found, check if it is working
2073 2073 err = None
2074 2074 try:
2075 2075 templater.templater.frommapfile(m)
2076 2076 except Exception as inst:
2077 2077 err = stringutil.forcebytestr(inst)
2078 2078 p = None
2079 2079 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2080 2080 else:
2081 2081 p = None
2082 2082 fm.condwrite(
2083 2083 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2084 2084 )
2085 2085 fm.condwrite(
2086 2086 not m,
2087 2087 b'defaulttemplatenotfound',
2088 2088 _(b" template '%s' not found\n"),
2089 2089 b"default",
2090 2090 )
2091 2091 if not p:
2092 2092 problems += 1
2093 2093 fm.condwrite(
2094 2094 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2095 2095 )
2096 2096
2097 2097 # editor
2098 2098 editor = ui.geteditor()
2099 2099 editor = util.expandpath(editor)
2100 2100 editorbin = procutil.shellsplit(editor)[0]
2101 2101 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2102 2102 cmdpath = procutil.findexe(editorbin)
2103 2103 fm.condwrite(
2104 2104 not cmdpath and editor == b'vi',
2105 2105 b'vinotfound',
2106 2106 _(
2107 2107 b" No commit editor set and can't find %s in PATH\n"
2108 2108 b" (specify a commit editor in your configuration"
2109 2109 b" file)\n"
2110 2110 ),
2111 2111 not cmdpath and editor == b'vi' and editorbin,
2112 2112 )
2113 2113 fm.condwrite(
2114 2114 not cmdpath and editor != b'vi',
2115 2115 b'editornotfound',
2116 2116 _(
2117 2117 b" Can't find editor '%s' in PATH\n"
2118 2118 b" (specify a commit editor in your configuration"
2119 2119 b" file)\n"
2120 2120 ),
2121 2121 not cmdpath and editorbin,
2122 2122 )
2123 2123 if not cmdpath and editor != b'vi':
2124 2124 problems += 1
2125 2125
2126 2126 # check username
2127 2127 username = None
2128 2128 err = None
2129 2129 try:
2130 2130 username = ui.username()
2131 2131 except error.Abort as e:
2132 2132 err = e.message
2133 2133 problems += 1
2134 2134
2135 2135 fm.condwrite(
2136 2136 username, b'username', _(b"checking username (%s)\n"), username
2137 2137 )
2138 2138 fm.condwrite(
2139 2139 err,
2140 2140 b'usernameerror',
2141 2141 _(
2142 2142 b"checking username...\n %s\n"
2143 2143 b" (specify a username in your configuration file)\n"
2144 2144 ),
2145 2145 err,
2146 2146 )
2147 2147
2148 2148 for name, mod in extensions.extensions():
2149 2149 handler = getattr(mod, 'debuginstall', None)
2150 2150 if handler is not None:
2151 2151 problems += handler(ui, fm)
2152 2152
2153 2153 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2154 2154 if not problems:
2155 2155 fm.data(problems=problems)
2156 2156 fm.condwrite(
2157 2157 problems,
2158 2158 b'problems',
2159 2159 _(b"%d problems detected, please check your install!\n"),
2160 2160 problems,
2161 2161 )
2162 2162 fm.end()
2163 2163
2164 2164 return problems
2165 2165
2166 2166
2167 2167 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2168 2168 def debugknown(ui, repopath, *ids, **opts):
2169 2169 """test whether node ids are known to a repo
2170 2170
2171 2171 Every ID must be a full-length hex node id string. Returns a list of 0s
2172 2172 and 1s indicating unknown/known.
2173 2173 """
2174 2174 repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
2175 2175 if not repo.capable(b'known'):
2176 2176 raise error.Abort(b"known() not supported by target repository")
2177 2177 flags = repo.known([bin(s) for s in ids])
2178 2178 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2179 2179
2180 2180
2181 2181 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2182 2182 def debuglabelcomplete(ui, repo, *args):
2183 2183 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2184 2184 debugnamecomplete(ui, repo, *args)
2185 2185
2186 2186
2187 2187 @command(
2188 2188 b'debuglocks',
2189 2189 [
2190 2190 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2191 2191 (
2192 2192 b'W',
2193 2193 b'force-free-wlock',
2194 2194 None,
2195 2195 _(b'free the working state lock (DANGEROUS)'),
2196 2196 ),
2197 2197 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2198 2198 (
2199 2199 b'S',
2200 2200 b'set-wlock',
2201 2201 None,
2202 2202 _(b'set the working state lock until stopped'),
2203 2203 ),
2204 2204 ],
2205 2205 _(b'[OPTION]...'),
2206 2206 )
2207 2207 def debuglocks(ui, repo, **opts):
2208 2208 """show or modify state of locks
2209 2209
2210 2210 By default, this command will show which locks are held. This
2211 2211 includes the user and process holding the lock, the amount of time
2212 2212 the lock has been held, and the machine name where the process is
2213 2213 running if it's not local.
2214 2214
2215 2215 Locks protect the integrity of Mercurial's data, so should be
2216 2216 treated with care. System crashes or other interruptions may cause
2217 2217 locks to not be properly released, though Mercurial will usually
2218 2218 detect and remove such stale locks automatically.
2219 2219
2220 2220 However, detecting stale locks may not always be possible (for
2221 2221 instance, on a shared filesystem). Removing locks may also be
2222 2222 blocked by filesystem permissions.
2223 2223
2224 2224 Setting a lock will prevent other commands from changing the data.
2225 2225 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2226 2226 The set locks are removed when the command exits.
2227 2227
2228 2228 Returns 0 if no locks are held.
2229 2229
2230 2230 """
2231 2231
2232 2232 if opts.get('force_free_lock'):
2233 2233 repo.svfs.tryunlink(b'lock')
2234 2234 if opts.get('force_free_wlock'):
2235 2235 repo.vfs.tryunlink(b'wlock')
2236 2236 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2237 2237 return 0
2238 2238
2239 2239 locks = []
2240 2240 try:
2241 2241 if opts.get('set_wlock'):
2242 2242 try:
2243 2243 locks.append(repo.wlock(False))
2244 2244 except error.LockHeld:
2245 2245 raise error.Abort(_(b'wlock is already held'))
2246 2246 if opts.get('set_lock'):
2247 2247 try:
2248 2248 locks.append(repo.lock(False))
2249 2249 except error.LockHeld:
2250 2250 raise error.Abort(_(b'lock is already held'))
2251 2251 if len(locks):
2252 2252 try:
2253 2253 if ui.interactive():
2254 2254 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2255 2255 ui.promptchoice(prompt)
2256 2256 else:
2257 2257 msg = b"%d locks held, waiting for signal\n"
2258 2258 msg %= len(locks)
2259 2259 ui.status(msg)
2260 2260 while True: # XXX wait for a signal
2261 2261 time.sleep(0.1)
2262 2262 except KeyboardInterrupt:
2263 2263 msg = b"signal-received releasing locks\n"
2264 2264 ui.status(msg)
2265 2265 return 0
2266 2266 finally:
2267 2267 release(*locks)
2268 2268
2269 2269 now = time.time()
2270 2270 held = 0
2271 2271
2272 2272 def report(vfs, name, method):
2273 2273 # this causes stale locks to get reaped for more accurate reporting
2274 2274 try:
2275 2275 l = method(False)
2276 2276 except error.LockHeld:
2277 2277 l = None
2278 2278
2279 2279 if l:
2280 2280 l.release()
2281 2281 else:
2282 2282 try:
2283 2283 st = vfs.lstat(name)
2284 2284 age = now - st[stat.ST_MTIME]
2285 2285 user = util.username(st.st_uid)
2286 2286 locker = vfs.readlock(name)
2287 2287 if b":" in locker:
2288 2288 host, pid = locker.split(b':')
2289 2289 if host == socket.gethostname():
2290 2290 locker = b'user %s, process %s' % (user or b'None', pid)
2291 2291 else:
2292 2292 locker = b'user %s, process %s, host %s' % (
2293 2293 user or b'None',
2294 2294 pid,
2295 2295 host,
2296 2296 )
2297 2297 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2298 2298 return 1
2299 2299 except FileNotFoundError:
2300 2300 pass
2301 2301
2302 2302 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2303 2303 return 0
2304 2304
2305 2305 held += report(repo.svfs, b"lock", repo.lock)
2306 2306 held += report(repo.vfs, b"wlock", repo.wlock)
2307 2307
2308 2308 return held
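# Example of the report produced by report() above when nothing is locked
# (format string b"%-6s free\n"; the return value is the number of held locks):
#
#   lock:  free
#   wlock: free
#
# A held lock is shown instead as e.g. "lock:  user alice, process 1234 (3s)".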
2309 2309
2310 2310
2311 2311 @command(
2312 2312 b'debugmanifestfulltextcache',
2313 2313 [
2314 2314 (b'', b'clear', False, _(b'clear the cache')),
2315 2315 (
2316 2316 b'a',
2317 2317 b'add',
2318 2318 [],
2319 2319 _(b'add the given manifest nodes to the cache'),
2320 2320 _(b'NODE'),
2321 2321 ),
2322 2322 ],
2323 2323 b'',
2324 2324 )
2325 2325 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2326 2326 """show, clear or amend the contents of the manifest fulltext cache"""
2327 2327
2328 2328 def getcache():
2329 2329 r = repo.manifestlog.getstorage(b'')
2330 2330 try:
2331 2331 return r._fulltextcache
2332 2332 except AttributeError:
2333 2333 msg = _(
2334 2334 b"Current revlog implementation doesn't appear to have a "
2335 2335 b"manifest fulltext cache\n"
2336 2336 )
2337 2337 raise error.Abort(msg)
2338 2338
2339 2339 if opts.get('clear'):
2340 2340 with repo.wlock():
2341 2341 cache = getcache()
2342 2342 cache.clear(clear_persisted_data=True)
2343 2343 return
2344 2344
2345 2345 if add:
2346 2346 with repo.wlock():
2347 2347 m = repo.manifestlog
2348 2348 store = m.getstorage(b'')
2349 2349 for n in add:
2350 2350 try:
2351 2351 manifest = m[store.lookup(n)]
2352 2352 except error.LookupError as e:
2353 2353 raise error.Abort(
2354 2354 bytes(e), hint=b"Check your manifest node id"
2355 2355 )
2356 2356 manifest.read() # stores revision in cache too
2357 2357 return
2358 2358
2359 2359 cache = getcache()
2360 2360 if not len(cache):
2361 2361 ui.write(_(b'cache empty\n'))
2362 2362 else:
2363 2363 ui.write(
2364 2364 _(
2365 2365 b'cache contains %d manifest entries, in order of most to '
2366 2366 b'least recent:\n'
2367 2367 )
2368 2368 % (len(cache),)
2369 2369 )
2370 2370 totalsize = 0
2371 2371 for nodeid in cache:
2372 2372 # Use cache.peek to not update the LRU order
2373 2373 data = cache.peek(nodeid)
2374 2374 size = len(data)
2375 2375 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2376 2376 ui.write(
2377 2377 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2378 2378 )
2379 2379 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2380 2380 ui.write(
2381 2381 _(b'total cache data size %s, on-disk %s\n')
2382 2382 % (util.bytecount(totalsize), util.bytecount(ondisk))
2383 2383 )
2384 2384
2385 2385
2386 2386 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2387 2387 def debugmergestate(ui, repo, *args, **opts):
2388 2388 """print merge state
2389 2389
2390 2390 Use --verbose to print out information about whether v1 or v2 merge state
2391 2391 was chosen."""
2392 2392
2393 2393 if ui.verbose:
2394 2394 ms = mergestatemod.mergestate(repo)
2395 2395
2396 2396 # sort so that reasonable information is on top
2397 2397 v1records = ms._readrecordsv1()
2398 2398 v2records = ms._readrecordsv2()
2399 2399
2400 2400 if not v1records and not v2records:
2401 2401 pass
2402 2402 elif not v2records:
2403 2403 ui.writenoi18n(b'no version 2 merge state\n')
2404 2404 elif ms._v1v2match(v1records, v2records):
2405 2405 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2406 2406 else:
2407 2407 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2408 2408
2409 2409 if not opts['template']:
2410 2410 opts['template'] = (
2411 2411 b'{if(commits, "", "no merge state found\n")}'
2412 2412 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2413 2413 b'{files % "file: {path} (state \\"{state}\\")\n'
2414 2414 b'{if(local_path, "'
2415 2415 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2416 2416 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2417 2417 b' other path: {other_path} (node {other_node})\n'
2418 2418 b'")}'
2419 2419 b'{if(rename_side, "'
2420 2420 b' rename side: {rename_side}\n'
2421 2421 b' renamed path: {renamed_path}\n'
2422 2422 b'")}'
2423 2423 b'{extras % " extra: {key} = {value}\n"}'
2424 2424 b'"}'
2425 2425 b'{extras % "extra: {file} ({key} = {value})\n"}'
2426 2426 )
2427 2427
2428 2428 ms = mergestatemod.mergestate.read(repo)
2429 2429
2430 2430 fm = ui.formatter(b'debugmergestate', pycompat.byteskwargs(opts))
2431 2431 fm.startitem()
2432 2432
2433 2433 fm_commits = fm.nested(b'commits')
2434 2434 if ms.active():
2435 2435 for name, node, label_index in (
2436 2436 (b'local', ms.local, 0),
2437 2437 (b'other', ms.other, 1),
2438 2438 ):
2439 2439 fm_commits.startitem()
2440 2440 fm_commits.data(name=name)
2441 2441 fm_commits.data(node=hex(node))
2442 2442 if ms._labels and len(ms._labels) > label_index:
2443 2443 fm_commits.data(label=ms._labels[label_index])
2444 2444 fm_commits.end()
2445 2445
2446 2446 fm_files = fm.nested(b'files')
2447 2447 if ms.active():
2448 2448 for f in ms:
2449 2449 fm_files.startitem()
2450 2450 fm_files.data(path=f)
2451 2451 state = ms._state[f]
2452 2452 fm_files.data(state=state[0])
2453 2453 if state[0] in (
2454 2454 mergestatemod.MERGE_RECORD_UNRESOLVED,
2455 2455 mergestatemod.MERGE_RECORD_RESOLVED,
2456 2456 ):
2457 2457 fm_files.data(local_key=state[1])
2458 2458 fm_files.data(local_path=state[2])
2459 2459 fm_files.data(ancestor_path=state[3])
2460 2460 fm_files.data(ancestor_node=state[4])
2461 2461 fm_files.data(other_path=state[5])
2462 2462 fm_files.data(other_node=state[6])
2463 2463 fm_files.data(local_flags=state[7])
2464 2464 elif state[0] in (
2465 2465 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2466 2466 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2467 2467 ):
2468 2468 fm_files.data(renamed_path=state[1])
2469 2469 fm_files.data(rename_side=state[2])
2470 2470 fm_extras = fm_files.nested(b'extras')
2471 2471 for k, v in sorted(ms.extras(f).items()):
2472 2472 fm_extras.startitem()
2473 2473 fm_extras.data(key=k)
2474 2474 fm_extras.data(value=v)
2475 2475 fm_extras.end()
2476 2476
2477 2477 fm_files.end()
2478 2478
2479 2479 fm_extras = fm.nested(b'extras')
2480 2480 for f, d in sorted(ms.allextras().items()):
2481 2481 if f in ms:
2482 2482 # If file is in mergestate, we have already processed its extras
2483 2483 continue
2484 2484 for k, v in d.items():
2485 2485 fm_extras.startitem()
2486 2486 fm_extras.data(file=f)
2487 2487 fm_extras.data(key=k)
2488 2488 fm_extras.data(value=v)
2489 2489 fm_extras.end()
2490 2490
2491 2491 fm.end()
2492 2492
2493 2493
2494 2494 @command(b'debugnamecomplete', [], _(b'NAME...'))
2495 2495 def debugnamecomplete(ui, repo, *args):
2496 2496 '''complete "names" - tags, open branch names, bookmark names'''
2497 2497
2498 2498 names = set()
2499 2499 # since we previously only listed open branches, we will handle that
2500 2500 # specially (after this for loop)
2501 2501 for name, ns in repo.names.items():
2502 2502 if name != b'branches':
2503 2503 names.update(ns.listnames(repo))
2504 2504 names.update(
2505 2505 tag
2506 2506 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2507 2507 if not closed
2508 2508 )
2509 2509 completions = set()
2510 2510 if not args:
2511 2511 args = [b'']
2512 2512 for a in args:
2513 2513 completions.update(n for n in names if n.startswith(a))
2514 2514 ui.write(b'\n'.join(sorted(completions)))
2515 2515 ui.write(b'\n')
2516 2516
2517 2517
2518 2518 @command(
2519 2519 b'debugnodemap',
2520 2520 (
2521 2521 cmdutil.debugrevlogopts
2522 2522 + [
2523 2523 (
2524 2524 b'',
2525 2525 b'dump-new',
2526 2526 False,
2527 2527 _(b'write a (new) persistent binary nodemap on stdout'),
2528 2528 ),
2529 2529 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2530 2530 (
2531 2531 b'',
2532 2532 b'check',
2533 2533 False,
2534 2534 _(b'check that the data on disk are correct.'),
2535 2535 ),
2536 2536 (
2537 2537 b'',
2538 2538 b'metadata',
2539 2539 False,
2540 2540 _(b'display the on-disk metadata for the nodemap'),
2541 2541 ),
2542 2542 ]
2543 2543 ),
2544 2544 _(b'-c|-m|FILE'),
2545 2545 )
2546 2546 def debugnodemap(ui, repo, file_=None, **opts):
2547 2547 """write and inspect on disk nodemap"""
2548 2548 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
2549 2549 if file_ is not None:
2550 2550 raise error.InputError(
2551 2551 _(b'cannot specify a file with other arguments')
2552 2552 )
2553 2553 elif file_ is None:
2554 2554 opts['changelog'] = True
2555 2555 r = cmdutil.openstorage(
2556 2556 repo.unfiltered(), b'debugnodemap', file_, pycompat.byteskwargs(opts)
2557 2557 )
2558 2558 if isinstance(r, (manifest.manifestrevlog, filelog.filelog)):
2559 2559 r = r._revlog
2560 2560 if opts['dump_new']:
2561 2561 if hasattr(r.index, "nodemap_data_all"):
2562 2562 data = r.index.nodemap_data_all()
2563 2563 else:
2564 2564 data = nodemap.persistent_data(r.index)
2565 2565 ui.write(data)
2566 2566 elif opts['dump_disk']:
2567 2567 nm_data = nodemap.persisted_data(r)
2568 2568 if nm_data is not None:
2569 2569 docket, data = nm_data
2570 2570 ui.write(data[:])
2571 2571 elif opts['check']:
2572 2572 nm_data = nodemap.persisted_data(r)
2573 2573 if nm_data is not None:
2574 2574 docket, data = nm_data
2575 2575 return nodemap.check_data(ui, r.index, data)
2576 2576 elif opts['metadata']:
2577 2577 nm_data = nodemap.persisted_data(r)
2578 2578 if nm_data is not None:
2579 2579 docket, data = nm_data
2580 2580 ui.write((b"uid: %s\n") % docket.uid)
2581 2581 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2582 2582 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2583 2583 ui.write((b"data-length: %d\n") % docket.data_length)
2584 2584 ui.write((b"data-unused: %d\n") % docket.data_unused)
2585 2585 unused_perc = docket.data_unused * 100.0 / docket.data_length
2586 2586 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2587 2587
2588 2588
2589 2589 @command(
2590 2590 b'debugobsolete',
2591 2591 [
2592 2592 (b'', b'flags', 0, _(b'markers flag')),
2593 2593 (
2594 2594 b'',
2595 2595 b'record-parents',
2596 2596 False,
2597 2597 _(b'record parent information for the precursor'),
2598 2598 ),
2599 2599 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2600 2600 (
2601 2601 b'',
2602 2602 b'exclusive',
2603 2603 False,
2604 2604 _(b'restrict display to markers only relevant to REV'),
2605 2605 ),
2606 2606 (b'', b'index', False, _(b'display index of the marker')),
2607 2607 (b'', b'delete', [], _(b'delete markers specified by indices')),
2608 2608 ]
2609 2609 + cmdutil.commitopts2
2610 2610 + cmdutil.formatteropts,
2611 2611 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2612 2612 )
2613 2613 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2614 2614 """create arbitrary obsolete marker
2615 2615
2616 2616 With no arguments, displays the list of obsolescence markers."""
2617 2617
2618 2618 def parsenodeid(s):
2619 2619 try:
2620 2620 # We do not use revsingle/revrange functions here to accept
2621 2621 # arbitrary node identifiers, possibly not present in the
2622 2622 # local repository.
2623 2623 n = bin(s)
2624 2624 if len(n) != repo.nodeconstants.nodelen:
2625 2625 raise ValueError
2626 2626 return n
2627 2627 except ValueError:
2628 2628 raise error.InputError(
2629 2629 b'changeset references must be full hexadecimal '
2630 2630 b'node identifiers'
2631 2631 )
2632 2632
2633 2633 if opts.get('delete'):
2634 2634 indices = []
2635 2635 for v in opts.get('delete'):
2636 2636 try:
2637 2637 indices.append(int(v))
2638 2638 except ValueError:
2639 2639 raise error.InputError(
2640 2640 _(b'invalid index value: %r') % v,
2641 2641 hint=_(b'use integers for indices'),
2642 2642 )
2643 2643
2644 2644 if repo.currenttransaction():
2645 2645 raise error.Abort(
2646 2646 _(b'cannot delete obsmarkers in the middle of transaction.')
2647 2647 )
2648 2648
2649 2649 with repo.lock():
2650 2650 n = repair.deleteobsmarkers(repo.obsstore, indices)
2651 2651 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2652 2652
2653 2653 return
2654 2654
2655 2655 if precursor is not None:
2656 2656 if opts['rev']:
2657 2657 raise error.InputError(
2658 2658 b'cannot select revision when creating marker'
2659 2659 )
2660 2660 metadata = {}
2661 2661 metadata[b'user'] = encoding.fromlocal(opts['user'] or ui.username())
2662 2662 succs = tuple(parsenodeid(succ) for succ in successors)
2663 2663 l = repo.lock()
2664 2664 try:
2665 2665 tr = repo.transaction(b'debugobsolete')
2666 2666 try:
2667 2667 date = opts.get('date')
2668 2668 if date:
2669 2669 date = dateutil.parsedate(date)
2670 2670 else:
2671 2671 date = None
2672 2672 prec = parsenodeid(precursor)
2673 2673 parents = None
2674 2674 if opts['record_parents']:
2675 2675 if prec not in repo.unfiltered():
2676 2676 raise error.Abort(
2677 2677 b'cannot use --record-parents on '
2678 2678 b'unknown changesets'
2679 2679 )
2680 2680 parents = repo.unfiltered()[prec].parents()
2681 2681 parents = tuple(p.node() for p in parents)
2682 2682 repo.obsstore.create(
2683 2683 tr,
2684 2684 prec,
2685 2685 succs,
2686 2686 opts['flags'],
2687 2687 parents=parents,
2688 2688 date=date,
2689 2689 metadata=metadata,
2690 2690 ui=ui,
2691 2691 )
2692 2692 tr.close()
2693 2693 except ValueError as exc:
2694 2694 raise error.Abort(
2695 2695 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2696 2696 )
2697 2697 finally:
2698 2698 tr.release()
2699 2699 finally:
2700 2700 l.release()
2701 2701 else:
2702 2702 if opts['rev']:
2703 2703 revs = logcmdutil.revrange(repo, opts['rev'])
2704 2704 nodes = [repo[r].node() for r in revs]
2705 2705 markers = list(
2706 2706 obsutil.getmarkers(
2707 2707 repo, nodes=nodes, exclusive=opts['exclusive']
2708 2708 )
2709 2709 )
2710 2710 markers.sort(key=lambda x: x._data)
2711 2711 else:
2712 2712 markers = obsutil.getmarkers(repo)
2713 2713
2714 2714 markerstoiter = markers
2715 2715 isrelevant = lambda m: True
2716 2716 if opts.get('rev') and opts.get('index'):
2717 2717 markerstoiter = obsutil.getmarkers(repo)
2718 2718 markerset = set(markers)
2719 2719 isrelevant = lambda m: m in markerset
2720 2720
2721 2721 fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
2722 2722 for i, m in enumerate(markerstoiter):
2723 2723 if not isrelevant(m):
2724 2724 # marker can be irrelevant when we're iterating over a set
2725 2725 # of markers (markerstoiter) which is bigger than the set
2726 2726 # of markers we want to display (markers)
2727 2727 # this can happen if both --index and --rev options are
2728 2728 # provided and thus we need to iterate over all of the markers
2729 2729 # to get the correct indices, but only display the ones that
2730 2730 # are relevant to --rev value
2731 2731 continue
2732 2732 fm.startitem()
2733 2733 ind = i if opts.get('index') else None
2734 2734 cmdutil.showmarker(fm, m, index=ind)
2735 2735 fm.end()
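# Illustrative usage (an editorial sketch, not part of the original module):
# with no arguments the existing markers are listed; creating a marker needs
# full-length hashes, since parsenodeid() above rejects anything shorter:
#
#   hg debugobsolete <old-full-hex-node> <new-full-hex-node> -d '0 0'
#
# The placeholders stand for complete 40-character changeset identifiers.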
2736 2736
2737 2737
2738 2738 @command(
2739 2739 b'debugp1copies',
2740 2740 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2741 2741 _(b'[-r REV]'),
2742 2742 )
2743 2743 def debugp1copies(ui, repo, **opts):
2744 2744 """dump copy information compared to p1"""
2745 2745
2746 2746 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
2747 2747 for dst, src in ctx.p1copies().items():
2748 2748 ui.write(b'%s -> %s\n' % (src, dst))
2749 2749
2750 2750
2751 2751 @command(
2752 2752 b'debugp2copies',
2753 2753 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2754 2754 _(b'[-r REV]'),
2755 2755 )
2756 2756 def debugp2copies(ui, repo, **opts):
2757 2757 """dump copy information compared to p2"""
2758 2758
2759 2759 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
2760 2760 for dst, src in ctx.p2copies().items():
2761 2761 ui.write(b'%s -> %s\n' % (src, dst))
2762 2762
2763 2763
2764 2764 @command(
2765 2765 b'debugpathcomplete',
2766 2766 [
2767 2767 (b'f', b'full', None, _(b'complete an entire path')),
2768 2768 (b'n', b'normal', None, _(b'show only normal files')),
2769 2769 (b'a', b'added', None, _(b'show only added files')),
2770 2770 (b'r', b'removed', None, _(b'show only removed files')),
2771 2771 ],
2772 2772 _(b'FILESPEC...'),
2773 2773 )
2774 2774 def debugpathcomplete(ui, repo, *specs, **opts):
2775 2775 """complete part or all of a tracked path
2776 2776
2777 2777 This command supports shells that offer path name completion. It
2778 2778 currently completes only files already known to the dirstate.
2779 2779
2780 2780 Completion extends only to the next path segment unless
2781 2781 --full is specified, in which case entire paths are used."""
2782 2782
2783 2783 def complete(path, acceptable):
2784 2784 dirstate = repo.dirstate
2785 2785 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2786 2786 rootdir = repo.root + pycompat.ossep
2787 2787 if spec != repo.root and not spec.startswith(rootdir):
2788 2788 return [], []
2789 2789 if os.path.isdir(spec):
2790 2790 spec += b'/'
2791 2791 spec = spec[len(rootdir) :]
2792 2792 fixpaths = pycompat.ossep != b'/'
2793 2793 if fixpaths:
2794 2794 spec = spec.replace(pycompat.ossep, b'/')
2795 2795 speclen = len(spec)
2796 2796 fullpaths = opts['full']
2797 2797 files, dirs = set(), set()
2798 2798 adddir, addfile = dirs.add, files.add
2799 2799 for f, st in dirstate.items():
2800 2800 if f.startswith(spec) and st.state in acceptable:
2801 2801 if fixpaths:
2802 2802 f = f.replace(b'/', pycompat.ossep)
2803 2803 if fullpaths:
2804 2804 addfile(f)
2805 2805 continue
2806 2806 s = f.find(pycompat.ossep, speclen)
2807 2807 if s >= 0:
2808 2808 adddir(f[:s])
2809 2809 else:
2810 2810 addfile(f)
2811 2811 return files, dirs
2812 2812
2813 2813 acceptable = b''
2814 2814 if opts['normal']:
2815 2815 acceptable += b'nm'
2816 2816 if opts['added']:
2817 2817 acceptable += b'a'
2818 2818 if opts['removed']:
2819 2819 acceptable += b'r'
2820 2820 cwd = repo.getcwd()
2821 2821 if not specs:
2822 2822 specs = [b'.']
2823 2823
2824 2824 files, dirs = set(), set()
2825 2825 for spec in specs:
2826 2826 f, d = complete(spec, acceptable or b'nmar')
2827 2827 files.update(f)
2828 2828 dirs.update(d)
2829 2829 files.update(dirs)
2830 2830 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2831 2831 ui.write(b'\n')
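# Illustrative behaviour (an editorial sketch, not part of the original
# module), assuming tracked files tests/test-a.t and tests/test-b.t:
#
#   $ hg debugpathcomplete te
#   tests
#   $ hg debugpathcomplete --full te
#   tests/test-a.t
#   tests/test-b.t
#
# Completion stops at the next path segment unless --full is given, as the
# docstring above describes.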
2832 2832
2833 2833
2834 2834 @command(
2835 2835 b'debugpathcopies',
2836 2836 cmdutil.walkopts,
2837 2837 b'hg debugpathcopies REV1 REV2 [FILE]',
2838 2838 inferrepo=True,
2839 2839 )
2840 2840 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2841 2841 """show copies between two revisions"""
2842 2842 ctx1 = scmutil.revsingle(repo, rev1)
2843 2843 ctx2 = scmutil.revsingle(repo, rev2)
2844 2844 m = scmutil.match(ctx1, pats, opts)
2845 2845 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2846 2846 ui.write(b'%s -> %s\n' % (src, dst))
2847 2847
2848 2848
2849 2849 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2850 2850 def debugpeer(ui, path):
2851 2851 """establish a connection to a peer repository"""
2852 2852 # Always enable peer request logging. Requires --debug to display
2853 2853 # though.
2854 2854 overrides = {
2855 2855 (b'devel', b'debug.peer-request'): True,
2856 2856 }
2857 2857
2858 2858 with ui.configoverride(overrides):
2859 2859 peer = hg.peer(ui, {}, path)
2860 2860
2861 2861 try:
2862 2862 local = peer.local() is not None
2863 2863 canpush = peer.canpush()
2864 2864
2865 2865 ui.write(_(b'url: %s\n') % peer.url())
2866 2866 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2867 2867 ui.write(
2868 2868 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2869 2869 )
2870 2870 finally:
2871 2871 peer.close()
2872 2872
2873 2873
2874 2874 @command(
2875 2875 b'debugpickmergetool',
2876 2876 [
2877 2877 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2878 2878 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2879 2879 ]
2880 2880 + cmdutil.walkopts
2881 2881 + cmdutil.mergetoolopts,
2882 2882 _(b'[PATTERN]...'),
2883 2883 inferrepo=True,
2884 2884 )
2885 2885 def debugpickmergetool(ui, repo, *pats, **opts):
2886 2886 """examine which merge tool is chosen for specified file
2887 2887
2888 2888 As described in :hg:`help merge-tools`, Mercurial examines the
2889 2889 configurations below, in this order, to decide which merge tool is
2890 2890 chosen for the specified file.
2891 2891
2892 2892 1. ``--tool`` option
2893 2893 2. ``HGMERGE`` environment variable
2894 2894 3. configurations in ``merge-patterns`` section
2895 2895 4. configuration of ``ui.merge``
2896 2896 5. configurations in ``merge-tools`` section
2897 2897 6. ``hgmerge`` tool (for historical reasons only)
2898 2898 7. default tool for fallback (``:merge`` or ``:prompt``)
2899 2899
2900 2900 This command writes out the examination result in the style below::
2901 2901
2902 2902 FILE = MERGETOOL
2903 2903
2904 2904 By default, all files known in the first parent context of the
2905 2905 working directory are examined. Use file patterns and/or -I/-X
2906 2906 options to limit target files. -r/--rev is also useful to examine
2907 2907 files in another context without actually updating to it.
2908 2908
2909 2909 With --debug, this command shows warning messages while matching
2910 2910 against ``merge-patterns`` and so on, too. It is recommended to
2911 2911 use this option with explicit file patterns and/or -I/-X options,
2912 2912 because this option increases the amount of output per file according
2913 2913 to configurations in hgrc.
2914 2914
2915 2915 With -v/--verbose, this command first shows the configurations below
2916 2916 (only those that are actually set).
2917 2917
2918 2918 - ``--tool`` option
2919 2919 - ``HGMERGE`` environment variable
2920 2920 - configuration of ``ui.merge``
2921 2921
2922 2922 If the merge tool is chosen before matching against
2923 2923 ``merge-patterns``, this command can't show any helpful
2924 2924 information, even with --debug. In such a case, the information above
2925 2925 is useful for understanding why a merge tool was chosen.
2926 2926 """
2927 2927 overrides = {}
2928 2928 if opts['tool']:
2929 2929 overrides[(b'ui', b'forcemerge')] = opts['tool']
2930 2930 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts['tool'])))
2931 2931
2932 2932 with ui.configoverride(overrides, b'debugmergepatterns'):
2933 2933 hgmerge = encoding.environ.get(b"HGMERGE")
2934 2934 if hgmerge is not None:
2935 2935 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2936 2936 uimerge = ui.config(b"ui", b"merge")
2937 2937 if uimerge:
2938 2938 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2939 2939
2940 2940 ctx = scmutil.revsingle(repo, opts.get('rev'))
2941 2941 m = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
2942 2942 changedelete = opts['changedelete']
2943 2943 for path in ctx.walk(m):
2944 2944 fctx = ctx[path]
2945 2945 with ui.silent(
2946 2946 error=True
2947 2947 ) if not ui.debugflag else util.nullcontextmanager():
2948 2948 tool, toolpath = filemerge._picktool(
2949 2949 repo,
2950 2950 ui,
2951 2951 path,
2952 2952 fctx.isbinary(),
2953 2953 b'l' in fctx.flags(),
2954 2954 changedelete,
2955 2955 )
2956 2956 ui.write(b'%s = %s\n' % (path, tool))
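# Illustrative invocation (an editorial sketch, not part of the original
# module): forcing the tool via step 1 of the precedence list above,
#
#   $ hg debugpickmergetool --tool :merge3 -r . 'glob:**.c'
#   foo.c = :merge3
#
# every matched file reports the forced tool in the `FILE = MERGETOOL` style
# described in the docstring.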
2957 2957
2958 2958
2959 2959 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2960 2960 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2961 2961 """access the pushkey key/value protocol
2962 2962
2963 2963 With two args, list the keys in the given namespace.
2964 2964
2965 2965 With five args, set a key to new if it currently is set to old.
2966 2966 Reports success or failure.
2967 2967 """
2968 2968
2969 2969 target = hg.peer(ui, {}, repopath)
2970 2970 try:
2971 2971 if keyinfo:
2972 2972 key, old, new = keyinfo
2973 2973 with target.commandexecutor() as e:
2974 2974 r = e.callcommand(
2975 2975 b'pushkey',
2976 2976 {
2977 2977 b'namespace': namespace,
2978 2978 b'key': key,
2979 2979 b'old': old,
2980 2980 b'new': new,
2981 2981 },
2982 2982 ).result()
2983 2983
2984 2984 ui.status(pycompat.bytestr(r) + b'\n')
2985 2985 return not r
2986 2986 else:
2987 2987 for k, v in sorted(target.listkeys(namespace).items()):
2988 2988 ui.write(
2989 2989 b"%s\t%s\n"
2990 2990 % (stringutil.escapestr(k), stringutil.escapestr(v))
2991 2991 )
2992 2992 finally:
2993 2993 target.close()
2994 2994
2995 2995
2996 2996 @command(b'debugpvec', [], _(b'A B'))
2997 2997 def debugpvec(ui, repo, a, b=None):
2998 2998 ca = scmutil.revsingle(repo, a)
2999 2999 cb = scmutil.revsingle(repo, b)
3000 3000 pa = pvec.ctxpvec(ca)
3001 3001 pb = pvec.ctxpvec(cb)
3002 3002 if pa == pb:
3003 3003 rel = b"="
3004 3004 elif pa > pb:
3005 3005 rel = b">"
3006 3006 elif pa < pb:
3007 3007 rel = b"<"
3008 3008 elif pa | pb:
3009 3009 rel = b"|"
3010 3010 ui.write(_(b"a: %s\n") % pa)
3011 3011 ui.write(_(b"b: %s\n") % pb)
3012 3012 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3013 3013 ui.write(
3014 3014 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3015 3015 % (
3016 3016 abs(pa._depth - pb._depth),
3017 3017 pvec._hamming(pa._vec, pb._vec),
3018 3018 pa.distance(pb),
3019 3019 rel,
3020 3020 )
3021 3021 )
3022 3022
3023 3023
3024 3024 @command(
3025 3025 b'debugrebuilddirstate|debugrebuildstate',
3026 3026 [
3027 3027 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3028 3028 (
3029 3029 b'',
3030 3030 b'minimal',
3031 3031 None,
3032 3032 _(
3033 3033 b'only rebuild files that are inconsistent with '
3034 3034 b'the working copy parent'
3035 3035 ),
3036 3036 ),
3037 3037 ],
3038 3038 _(b'[-r REV]'),
3039 3039 )
3040 3040 def debugrebuilddirstate(ui, repo, rev, **opts):
3041 3041 """rebuild the dirstate as it would look like for the given revision
3042 3042
3043 3043 If no revision is specified, the first current parent will be used.
3044 3044
3045 3045 The dirstate will be set to the files of the given revision.
3046 3046 The actual working directory content or existing dirstate
3047 3047 information such as adds or removes is not considered.
3048 3048
3049 3049 ``minimal`` will only rebuild the dirstate status for files that claim to be
3050 3050 tracked but are not in the parent manifest, or that exist in the parent
3051 3051 manifest but are not in the dirstate. It will not change adds, removes, or
3052 3052 modified files that are in the working copy parent.
3053 3053
3054 3054 One use of this command is to make the next :hg:`status` invocation
3055 3055 check the actual file content.
3056 3056 """
3057 3057 ctx = scmutil.revsingle(repo, rev)
3058 3058 with repo.wlock():
3059 3059 if repo.currenttransaction() is not None:
3060 3060 msg = b'rebuild the dirstate outside of a transaction'
3061 3061 raise error.ProgrammingError(msg)
3062 3062 dirstate = repo.dirstate
3063 3063 changedfiles = None
3064 3064 # See command doc for what minimal does.
3065 3065 if opts.get('minimal'):
3066 3066 manifestfiles = set(ctx.manifest().keys())
3067 3067 dirstatefiles = set(dirstate)
3068 3068 manifestonly = manifestfiles - dirstatefiles
3069 3069 dsonly = dirstatefiles - manifestfiles
3070 3070 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3071 3071 changedfiles = manifestonly | dsnotadded
3072 3072
3073 3073 with dirstate.changing_parents(repo):
3074 3074 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3075 3075
3076 3076
3077 3077 @command(
3078 3078 b'debugrebuildfncache',
3079 3079 [
3080 3080 (
3081 3081 b'',
3082 3082 b'only-data',
3083 3083 False,
3084 3084 _(b'only look for wrong .d files (much faster)'),
3085 3085 )
3086 3086 ],
3087 3087 b'',
3088 3088 )
3089 3089 def debugrebuildfncache(ui, repo, **opts):
3090 3090 """rebuild the fncache file"""
3091 3091 repair.rebuildfncache(ui, repo, opts.get("only_data"))
3092 3092
3093 3093
3094 3094 @command(
3095 3095 b'debugrename',
3096 3096 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3097 3097 _(b'[-r REV] [FILE]...'),
3098 3098 )
3099 3099 def debugrename(ui, repo, *pats, **opts):
3100 3100 """dump rename information"""
3101 3101
3102 3102 ctx = scmutil.revsingle(repo, opts.get('rev'))
3103 3103 m = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
3104 3104 for abs in ctx.walk(m):
3105 3105 fctx = ctx[abs]
3106 3106 o = fctx.filelog().renamed(fctx.filenode())
3107 3107 rel = repo.pathto(abs)
3108 3108 if o:
3109 3109 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3110 3110 else:
3111 3111 ui.write(_(b"%s not renamed\n") % rel)
3112 3112
3113 3113
3114 3114 @command(b'debugrequires|debugrequirements', [], b'')
3115 3115 def debugrequirements(ui, repo):
3116 3116 """print the current repo requirements"""
3117 3117 for r in sorted(repo.requirements):
3118 3118 ui.write(b"%s\n" % r)
3119 3119
3120 3120
3121 3121 @command(
3122 3122 b'debugrevlog',
3123 3123 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3124 3124 _(b'-c|-m|FILE'),
3125 3125 optionalrepo=True,
3126 3126 )
3127 3127 def debugrevlog(ui, repo, file_=None, **opts):
3128 3128 """show data and statistics about a revlog"""
3129 3129 r = cmdutil.openrevlog(
3130 3130 repo, b'debugrevlog', file_, pycompat.byteskwargs(opts)
3131 3131 )
3132 3132
3133 3133 if opts.get("dump"):
3134 3134 revlog_debug.dump(ui, r)
3135 3135 else:
3136 3136 revlog_debug.debug_revlog(ui, r)
3137 3137 return 0
3138 3138
3139 3139
3140 3140 @command(
3141 3141 b'debugrevlogindex',
3142 3142 cmdutil.debugrevlogopts
3143 3143 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3144 3144 _(b'[-f FORMAT] -c|-m|FILE'),
3145 3145 optionalrepo=True,
3146 3146 )
3147 3147 def debugrevlogindex(ui, repo, file_=None, **opts):
3148 3148 """dump the contents of a revlog index"""
3149 3149 r = cmdutil.openrevlog(
3150 3150 repo, b'debugrevlogindex', file_, pycompat.byteskwargs(opts)
3151 3151 )
3152 3152 format = opts.get('format', 0)
3153 3153 if format not in (0, 1):
3154 3154 raise error.Abort(_(b"unknown format %d") % format)
3155 3155
3156 3156 if ui.debugflag:
3157 3157 shortfn = hex
3158 3158 else:
3159 3159 shortfn = short
3160 3160
3161 3161 # There might not be anything in r, so have a sane default
3162 3162 idlen = 12
3163 3163 for i in r:
3164 3164 idlen = len(shortfn(r.node(i)))
3165 3165 break
3166 3166
3167 3167 if format == 0:
3168 3168 if ui.verbose:
3169 3169 ui.writenoi18n(
3170 3170 b" rev offset length linkrev %s %s p2\n"
3171 3171 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3172 3172 )
3173 3173 else:
3174 3174 ui.writenoi18n(
3175 3175 b" rev linkrev %s %s p2\n"
3176 3176 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3177 3177 )
3178 3178 elif format == 1:
3179 3179 if ui.verbose:
3180 3180 ui.writenoi18n(
3181 3181 (
3182 3182 b" rev flag offset length size link p1"
3183 3183 b" p2 %s\n"
3184 3184 )
3185 3185 % b"nodeid".rjust(idlen)
3186 3186 )
3187 3187 else:
3188 3188 ui.writenoi18n(
3189 3189 b" rev flag size link p1 p2 %s\n"
3190 3190 % b"nodeid".rjust(idlen)
3191 3191 )
3192 3192
3193 3193 for i in r:
3194 3194 node = r.node(i)
3195 3195 if format == 0:
3196 3196 try:
3197 3197 pp = r.parents(node)
3198 3198 except Exception:
3199 3199 pp = [repo.nullid, repo.nullid]
3200 3200 if ui.verbose:
3201 3201 ui.write(
3202 3202 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3203 3203 % (
3204 3204 i,
3205 3205 r.start(i),
3206 3206 r.length(i),
3207 3207 r.linkrev(i),
3208 3208 shortfn(node),
3209 3209 shortfn(pp[0]),
3210 3210 shortfn(pp[1]),
3211 3211 )
3212 3212 )
3213 3213 else:
3214 3214 ui.write(
3215 3215 b"% 6d % 7d %s %s %s\n"
3216 3216 % (
3217 3217 i,
3218 3218 r.linkrev(i),
3219 3219 shortfn(node),
3220 3220 shortfn(pp[0]),
3221 3221 shortfn(pp[1]),
3222 3222 )
3223 3223 )
3224 3224 elif format == 1:
3225 3225 pr = r.parentrevs(i)
3226 3226 if ui.verbose:
3227 3227 ui.write(
3228 3228 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3229 3229 % (
3230 3230 i,
3231 3231 r.flags(i),
3232 3232 r.start(i),
3233 3233 r.length(i),
3234 3234 r.rawsize(i),
3235 3235 r.linkrev(i),
3236 3236 pr[0],
3237 3237 pr[1],
3238 3238 shortfn(node),
3239 3239 )
3240 3240 )
3241 3241 else:
3242 3242 ui.write(
3243 3243 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3244 3244 % (
3245 3245 i,
3246 3246 r.flags(i),
3247 3247 r.rawsize(i),
3248 3248 r.linkrev(i),
3249 3249 pr[0],
3250 3250 pr[1],
3251 3251 shortfn(node),
3252 3252 )
3253 3253 )
3254 3254
3255 3255
3256 3256 @command(
3257 3257 b'debugrevspec',
3258 3258 [
3259 3259 (
3260 3260 b'',
3261 3261 b'optimize',
3262 3262 None,
3263 3263 _(b'print parsed tree after optimizing (DEPRECATED)'),
3264 3264 ),
3265 3265 (
3266 3266 b'',
3267 3267 b'show-revs',
3268 3268 True,
3269 3269 _(b'print list of result revisions (default)'),
3270 3270 ),
3271 3271 (
3272 3272 b's',
3273 3273 b'show-set',
3274 3274 None,
3275 3275 _(b'print internal representation of result set'),
3276 3276 ),
3277 3277 (
3278 3278 b'p',
3279 3279 b'show-stage',
3280 3280 [],
3281 3281 _(b'print parsed tree at the given stage'),
3282 3282 _(b'NAME'),
3283 3283 ),
3284 3284 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3285 3285 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3286 3286 ],
3287 3287 b'REVSPEC',
3288 3288 )
3289 3289 def debugrevspec(ui, repo, expr, **opts):
3290 3290 """parse and apply a revision specification
3291 3291
3292 3292 Use -p/--show-stage option to print the parsed tree at the given stages.
3293 3293 Use -p all to print the tree at every stage.
3294 3294
3295 3295 Use --no-show-revs option with -s or -p to print only the set
3296 3296 representation or the parsed tree respectively.
3297 3297
3298 3298 Use --verify-optimized to compare the optimized result with the unoptimized
3299 3299 one. Returns 1 if the optimized result differs.
3300 3300 """
3301 3301 aliases = ui.configitems(b'revsetalias')
3302 3302 stages = [
3303 3303 (b'parsed', lambda tree: tree),
3304 3304 (
3305 3305 b'expanded',
3306 3306 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3307 3307 ),
3308 3308 (b'concatenated', revsetlang.foldconcat),
3309 3309 (b'analyzed', revsetlang.analyze),
3310 3310 (b'optimized', revsetlang.optimize),
3311 3311 ]
3312 3312 if opts['no_optimized']:
3313 3313 stages = stages[:-1]
3314 3314 if opts['verify_optimized'] and opts['no_optimized']:
3315 3315 raise error.Abort(
3316 3316 _(b'cannot use --verify-optimized with --no-optimized')
3317 3317 )
3318 3318 stagenames = {n for n, f in stages}
3319 3319
3320 3320 showalways = set()
3321 3321 showchanged = set()
3322 3322 if ui.verbose and not opts['show_stage']:
3323 3323 # show parsed tree by --verbose (deprecated)
3324 3324 showalways.add(b'parsed')
3325 3325 showchanged.update([b'expanded', b'concatenated'])
3326 3326 if opts['optimize']:
3327 3327 showalways.add(b'optimized')
3328 3328 if opts['show_stage'] and opts['optimize']:
3329 3329 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3330 3330 if opts['show_stage'] == [b'all']:
3331 3331 showalways.update(stagenames)
3332 3332 else:
3333 3333 for n in opts['show_stage']:
3334 3334 if n not in stagenames:
3335 3335 raise error.Abort(_(b'invalid stage name: %s') % n)
3336 3336 showalways.update(opts['show_stage'])
3337 3337
3338 3338 treebystage = {}
3339 3339 printedtree = None
3340 3340 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3341 3341 for n, f in stages:
3342 3342 treebystage[n] = tree = f(tree)
3343 3343 if n in showalways or (n in showchanged and tree != printedtree):
3344 3344 if opts['show_stage'] or n != b'parsed':
3345 3345 ui.write(b"* %s:\n" % n)
3346 3346 ui.write(revsetlang.prettyformat(tree), b"\n")
3347 3347 printedtree = tree
3348 3348
3349 3349 if opts['verify_optimized']:
3350 3350 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3351 3351 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3352 3352 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
3353 3353 ui.writenoi18n(
3354 3354 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3355 3355 )
3356 3356 ui.writenoi18n(
3357 3357 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3358 3358 )
3359 3359 arevs = list(arevs)
3360 3360 brevs = list(brevs)
3361 3361 if arevs == brevs:
3362 3362 return 0
3363 3363 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3364 3364 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3365 3365 sm = difflib.SequenceMatcher(None, arevs, brevs)
3366 3366 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3367 3367 if tag in ('delete', 'replace'):
3368 3368 for c in arevs[alo:ahi]:
3369 3369 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3370 3370 if tag in ('insert', 'replace'):
3371 3371 for c in brevs[blo:bhi]:
3372 3372 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3373 3373 if tag == 'equal':
3374 3374 for c in arevs[alo:ahi]:
3375 3375 ui.write(b' %d\n' % c)
3376 3376 return 1
3377 3377
3378 3378 func = revset.makematcher(tree)
3379 3379 revs = func(repo)
3380 3380 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
3381 3381 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3382 3382 if not opts['show_revs']:
3383 3383 return
3384 3384 for c in revs:
3385 3385 ui.write(b"%d\n" % c)
3386 3386
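# Editor's note: the following is an illustrative sketch only (a hypothetical
# helper, not registered as a command). It shows how the staged pipeline
# walked by debugrevspec above reduces to plain function composition over the
# parsed tree.
def _example_revspec_pipeline(ui, repo, expr):
    """evaluate EXPR through the same stages used by debugrevspec"""
    aliases = ui.configitems(b'revsetalias')
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    tree = revsetlang.expandaliases(tree, aliases, ui.warn)
    tree = revsetlang.foldconcat(tree)
    tree = revsetlang.analyze(tree)
    tree = revsetlang.optimize(tree)
    # a matcher built from the final tree yields the result set
    return revset.makematcher(tree)(repo)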
3387 3387
3388 3388 @command(
3389 3389 b'debugserve',
3390 3390 [
3391 3391 (
3392 3392 b'',
3393 3393 b'sshstdio',
3394 3394 False,
3395 3395 _(b'run an SSH server bound to process handles'),
3396 3396 ),
3397 3397 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3398 3398 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3399 3399 ],
3400 3400 b'',
3401 3401 )
3402 3402 def debugserve(ui, repo, **opts):
3403 3403 """run a server with advanced settings
3404 3404
3405 3405 This command is similar to :hg:`serve`. It exists partially as a
3406 3406 workaround for the fact that ``hg serve --stdio`` must have specific
3407 3407 arguments for security reasons.
3408 3408 """
3409 3409 if not opts['sshstdio']:
3410 3410 raise error.Abort(_(b'only --sshstdio is currently supported'))
3411 3411
3412 3412 logfh = None
3413 3413
3414 3414 if opts['logiofd'] and opts['logiofile']:
3415 3415 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3416 3416
3417 3417 if opts['logiofd']:
3418 3418 # Ideally we would be line buffered. But line buffering in binary
3419 3419 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3420 3420 # buffering could have performance impacts. But since this isn't
3421 3421 # performance critical code, it should be fine.
3422 3422 try:
3423 3423 logfh = os.fdopen(int(opts['logiofd']), 'ab', 0)
3424 3424 except OSError as e:
3425 3425 if e.errno != errno.ESPIPE:
3426 3426 raise
3427 3427 # can't seek a pipe, so `ab` mode fails on py3
3428 3428 logfh = os.fdopen(int(opts['logiofd']), 'wb', 0)
3429 3429 elif opts['logiofile']:
3430 3430 logfh = open(opts['logiofile'], b'ab', 0)
3431 3431
3432 3432 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3433 3433 s.serve_forever()
3434 3434
3435 3435
3436 3436 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3437 3437 def debugsetparents(ui, repo, rev1, rev2=None):
3438 3438 """manually set the parents of the current working directory (DANGEROUS)
3439 3439
3440 3440 This command is not what you are looking for and should not be used. Using
3441 3441 this command will most certainly result in slight corruption of the file
3442 3442 level histories within your repository. DO NOT USE THIS COMMAND.
3443 3443
3444 3444 The command updates the p1 and p2 fields in the dirstate, without touching
3445 3445 anything else. This is useful for writing repository conversion tools, but
3446 3446 should be used with extreme care. For example, neither the working
3447 3447 directory nor the dirstate is updated, so file statuses may be incorrect
3448 3448 after running this command. Use it only if you are one of the few people who
3449 3449 deeply understand both conversion tools and file level histories. If you are
3450 3450 reading this help, you are not one of those people (most of them sailed west
3451 3451 from Mithlond anyway).
3452 3452
3453 3453 So, one more time, DO NOT USE THIS COMMAND.
3454 3454
3455 3455 Returns 0 on success.
3456 3456 """
3457 3457
3458 3458 node1 = scmutil.revsingle(repo, rev1).node()
3459 3459 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3460 3460
3461 3461 with repo.wlock():
3462 3462 repo.setparents(node1, node2)
3463 3463
3464 3464
3465 3465 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3466 3466 def debugsidedata(ui, repo, file_, rev=None, **opts):
3467 3467 """dump the side data for a cl/manifest/file revision
3468 3468
3469 3469 Use --verbose to dump the sidedata content."""
3470 3470 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
3471 3471 if rev is not None:
3472 3472 raise error.InputError(
3473 3473 _(b'cannot specify a revision with other arguments')
3474 3474 )
3475 3475 file_, rev = None, file_
3476 3476 elif rev is None:
3477 3477 raise error.InputError(_(b'please specify a revision'))
3478 3478 r = cmdutil.openstorage(
3479 3479 repo, b'debugdata', file_, pycompat.byteskwargs(opts)
3480 3480 )
3481 3481 r = getattr(r, '_revlog', r)
3482 3482 try:
3483 3483 sidedata = r.sidedata(r.lookup(rev))
3484 3484 except KeyError:
3485 3485 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3486 3486 if sidedata:
3487 3487 sidedata = list(sidedata.items())
3488 3488 sidedata.sort()
3489 3489 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3490 3490 for key, value in sidedata:
3491 3491 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3492 3492 if ui.verbose:
3493 3493 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3494 3494
3495 3495
3496 3496 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3497 3497 def debugssl(ui, repo, source=None, **opts):
3498 3498 """test a secure connection to a server
3499 3499
3500 3500 This builds the certificate chain for the server on Windows, installing the
3501 3501 missing intermediates and trusted root via Windows Update if necessary. It
3502 3502 does nothing on other platforms.
3503 3503
3504 3504 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3505 3505 that server is used. See :hg:`help urls` for more information.
3506 3506
3507 3507 If the update succeeds, retry the original operation. Otherwise, the cause
3508 3508 of the SSL error is likely another issue.
3509 3509 """
3510 3510 if not pycompat.iswindows:
3511 3511 raise error.Abort(
3512 3512 _(b'certificate chain building is only possible on Windows')
3513 3513 )
3514 3514
3515 3515 if not source:
3516 3516 if not repo:
3517 3517 raise error.Abort(
3518 3518 _(
3519 3519 b"there is no Mercurial repository here, and no "
3520 3520 b"server specified"
3521 3521 )
3522 3522 )
3523 3523 source = b"default"
3524 3524
3525 3525 path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
3526 3526 url = path.url
3527 3527
3528 3528 defaultport = {b'https': 443, b'ssh': 22}
3529 3529 if url.scheme in defaultport:
3530 3530 try:
3531 3531 addr = (url.host, int(url.port or defaultport[url.scheme]))
3532 3532 except ValueError:
3533 3533 raise error.Abort(_(b"malformed port number in URL"))
3534 3534 else:
3535 3535 raise error.Abort(_(b"only https and ssh connections are supported"))
3536 3536
3537 3537 from . import win32
3538 3538
3539 3539 s = ssl.wrap_socket(
3540 3540 socket.socket(),
3541 3541 ssl_version=ssl.PROTOCOL_TLS,
3542 3542 cert_reqs=ssl.CERT_NONE,
3543 3543 ca_certs=None,
3544 3544 )
3545 3545
3546 3546 try:
3547 3547 s.connect(addr)
3548 3548 cert = s.getpeercert(True)
3549 3549
3550 3550 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3551 3551
3552 3552 complete = win32.checkcertificatechain(cert, build=False)
3553 3553
3554 3554 if not complete:
3555 3555 ui.status(_(b'certificate chain is incomplete, updating... '))
3556 3556
3557 3557 if not win32.checkcertificatechain(cert):
3558 3558 ui.status(_(b'failed.\n'))
3559 3559 else:
3560 3560 ui.status(_(b'done.\n'))
3561 3561 else:
3562 3562 ui.status(_(b'full certificate chain is available\n'))
3563 3563 finally:
3564 3564 s.close()
3565 3565
3566 3566
3567 3567 @command(
3568 3568 b'debug::stable-tail-sort',
3569 3569 [
3570 3570 (
3571 3571 b'T',
3572 3572 b'template',
3573 3573 b'{rev}\n',
3574 3574 _(b'display with template'),
3575 3575 _(b'TEMPLATE'),
3576 3576 ),
3577 3577 ],
3578 3578 b'REV',
3579 3579 )
3580 3580 def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
3581 3581 """display the stable-tail sort of the ancestors of a given node"""
3582 3582 rev = logcmdutil.revsingle(repo, revspec).rev()
3583 3583 cl = repo.changelog
3584 3584
3585 3585 displayer = logcmdutil.maketemplater(ui, repo, template)
3586 3586 sorted_revs = stabletailsort._stable_tail_sort_naive(cl, rev)
3587 3587 for ancestor_rev in sorted_revs:
3588 3588 displayer.show(repo[ancestor_rev])
3589 3589
3590 3590
3591 3591 @command(
3592 3592 b'debug::stable-tail-sort-leaps',
3593 3593 [
3594 3594 (
3595 3595 b'T',
3596 3596 b'template',
3597 3597 b'{rev}',
3598 3598 _(b'display with template'),
3599 3599 _(b'TEMPLATE'),
3600 3600 ),
3601 3601 (b's', b'specific', False, _(b'restrict to specific leaps')),
3602 3602 ],
3603 3603 b'REV',
3604 3604 )
3605 3605 def debug_stable_tail_sort_leaps(ui, repo, rspec, template, specific, **opts):
3606 3606 """display the leaps in the stable-tail sort of a node, one per line"""
3607 3607 rev = logcmdutil.revsingle(repo, rspec).rev()
3608 3608
3609 3609 if specific:
3610 3610 get_leaps = stabletailsort._find_specific_leaps_naive
3611 3611 else:
3612 3612 get_leaps = stabletailsort._find_all_leaps_naive
3613 3613
3614 3614 displayer = logcmdutil.maketemplater(ui, repo, template)
3615 3615 for source, target in get_leaps(repo.changelog, rev):
3616 3616 displayer.show(repo[source])
3617 3617 displayer.show(repo[target])
3618 3618 ui.write(b'\n')
3619 3619
3620 3620
3621 3621 @command(
3622 3622 b"debugbackupbundle",
3623 3623 [
3624 3624 (
3625 3625 b"",
3626 3626 b"recover",
3627 3627 b"",
3628 3628 b"brings the specified changeset back into the repository",
3629 3629 )
3630 3630 ]
3631 3631 + cmdutil.logopts,
3632 3632 _(b"hg debugbackupbundle [--recover HASH]"),
3633 3633 )
3634 3634 def debugbackupbundle(ui, repo, *pats, **opts):
3635 3635 """lists the changesets available in backup bundles
3636 3636
3637 3637 Without any arguments, this command prints a list of the changesets in each
3638 3638 backup bundle.
3639 3639
3640 3640 --recover takes a changeset hash and unbundles the first bundle that
3641 3641 contains that hash, which puts that changeset back in your repository.
3642 3642
3643 3643 --verbose will print the entire commit message and the bundle path for that
3644 3644 backup.
3645 3645 """
3646 3646 backups = list(
3647 3647 filter(
3648 3648 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3649 3649 )
3650 3650 )
3651 3651 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3652 3652
3653 3653 opts["bundle"] = b""
3654 3654 opts["force"] = None
3655 3655 limit = logcmdutil.getlimit(pycompat.byteskwargs(opts))
3656 3656
3657 3657 def display(other, chlist, displayer):
3658 3658 if opts.get("newest_first"):
3659 3659 chlist.reverse()
3660 3660 count = 0
3661 3661 for n in chlist:
3662 3662 if limit is not None and count >= limit:
3663 3663 break
3664 3664 parents = [
3665 3665 True for p in other.changelog.parents(n) if p != repo.nullid
3666 3666 ]
3667 3667 if opts.get("no_merges") and len(parents) == 2:
3668 3668 continue
3669 3669 count += 1
3670 3670 displayer.show(other[n])
3671 3671
3672 3672 recovernode = opts.get("recover")
3673 3673 if recovernode:
3674 3674 if scmutil.isrevsymbol(repo, recovernode):
3675 3675 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3676 3676 return
3677 3677 elif backups:
3678 3678 msg = _(
3679 3679 b"Recover changesets using: hg debugbackupbundle --recover "
3680 3680 b"<changeset hash>\n\nAvailable backup changesets:"
3681 3681 )
3682 3682 ui.status(msg, label=b"status.removed")
3683 3683 else:
3684 3684 ui.status(_(b"no backup changesets found\n"))
3685 3685 return
3686 3686
3687 3687 for backup in backups:
3688 3688 # Much of this is copied from the hg incoming logic
3689 3689 source = os.path.relpath(backup, encoding.getcwd())
3690 3690 path = urlutil.get_unique_pull_path_obj(
3691 3691 b'debugbackupbundle',
3692 3692 ui,
3693 3693 source,
3694 3694 )
3695 3695 try:
3696 3696 other = hg.peer(repo, pycompat.byteskwargs(opts), path)
3697 3697 except error.LookupError as ex:
3698 3698 msg = _(b"\nwarning: unable to open bundle %s") % path.loc
3699 3699 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3700 3700 ui.warn(msg, hint=hint)
3701 3701 continue
3702 3702 branches = (path.branch, opts.get('branch', []))
3703 3703 revs, checkout = hg.addbranchrevs(
3704 3704 repo, other, branches, opts.get("rev")
3705 3705 )
3706 3706
3707 3707 if revs:
3708 3708 revs = [other.lookup(rev) for rev in revs]
3709 3709
3710 3710 with ui.silent():
3711 3711 try:
3712 3712 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3713 3713 ui, repo, other, revs, opts["bundle"], opts["force"]
3714 3714 )
3715 3715 except error.LookupError:
3716 3716 continue
3717 3717
3718 3718 try:
3719 3719 if not chlist:
3720 3720 continue
3721 3721 if recovernode:
3722 3722 with repo.lock(), repo.transaction(b"unbundle") as tr:
3723 3723 if scmutil.isrevsymbol(other, recovernode):
3724 3724 ui.status(_(b"Unbundling %s\n") % (recovernode))
3725 3725 f = hg.openpath(ui, path.loc)
3726 3726 gen = exchange.readbundle(ui, f, path.loc)
3727 3727 if isinstance(gen, bundle2.unbundle20):
3728 3728 bundle2.applybundle(
3729 3729 repo,
3730 3730 gen,
3731 3731 tr,
3732 3732 source=b"unbundle",
3733 3733 url=b"bundle:" + path.loc,
3734 3734 )
3735 3735 else:
3736 3736 gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
3737 3737 break
3738 3738 else:
3739 3739 backupdate = encoding.strtolocal(
3740 3740 time.strftime(
3741 3741 "%a %H:%M, %Y-%m-%d",
3742 3742 time.localtime(os.path.getmtime(path.loc)),
3743 3743 )
3744 3744 )
3745 3745 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3746 3746 if ui.verbose:
3747 3747 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
3748 3748 else:
3749 3749 opts[
3750 3750 "template"
3751 3751 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3752 3752 displayer = logcmdutil.changesetdisplayer(
3753 3753 ui, other, pycompat.byteskwargs(opts), False
3754 3754 )
3755 3755 display(other, chlist, displayer)
3756 3756 displayer.close()
3757 3757 finally:
3758 3758 cleanupfn()
3759 3759
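# Editor's note: illustrative sketch only (hypothetical helper, not a
# command). It mirrors how debugbackupbundle above discovers strip backups:
# every ``.hg/strip-backup/*.hg`` file, sorted newest first.
def _example_list_strip_backups(repo):
    paths = glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
    backups = [p for p in paths if os.path.isfile(p)]
    backups.sort(key=os.path.getmtime, reverse=True)
    return backups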
3760 3760
3761 3761 @command(
3762 3762 b'debugsub',
3763 3763 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3764 3764 _(b'[-r REV] [REV]'),
3765 3765 )
3766 3766 def debugsub(ui, repo, rev=None):
3767 3767 ctx = scmutil.revsingle(repo, rev, None)
3768 3768 for k, v in sorted(ctx.substate.items()):
3769 3769 ui.writenoi18n(b'path %s\n' % k)
3770 3770 ui.writenoi18n(b' source %s\n' % v[0])
3771 3771 ui.writenoi18n(b' revision %s\n' % v[1])
3772 3772
3773 3773
3774 3774 @command(
3775 3775 b'debugshell',
3776 3776 [
3777 3777 (
3778 3778 b'c',
3779 3779 b'command',
3780 3780 b'',
3781 3781 _(b'program passed in as a string'),
3782 3782 _(b'COMMAND'),
3783 3783 )
3784 3784 ],
3785 3785 _(b'[-c COMMAND]'),
3786 3786 optionalrepo=True,
3787 3787 )
3788 3788 def debugshell(ui, repo, **opts):
3789 3789 """run an interactive Python interpreter
3790 3790
3791 3791 The local namespace is provided with a reference to the ui and
3792 3792 the repo instance (if available).
3793 3793 """
3794 3794 import code
3795 3795
3796 3796 imported_objects = {
3797 3797 'ui': ui,
3798 3798 'repo': repo,
3799 3799 }
3800 3800
3801 3801 # py2exe disables initialization of the site module, which is responsible
3802 3802 # for arranging for ``quit()`` to exit the interpreter. Manually initialize
3803 3803 # the stuff that site normally does here, so that the interpreter can be
3804 3804 # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
3805 3805 # py.exe, or py2exe.
3806 3806 if getattr(sys, "frozen", None) == 'console_exe':
3807 3807 try:
3808 3808 import site
3809 3809
3810 3810 site.setcopyright()
3811 3811 site.sethelper()
3812 3812 site.setquit()
3813 3813 except ImportError:
3814 3814 site = None # Keep PyCharm happy
3815 3815
3816 3816 command = opts.get('command')
3817 3817 if command:
3818 3818 compiled = code.compile_command(encoding.strfromlocal(command))
3819 3819 code.InteractiveInterpreter(locals=imported_objects).runcode(compiled)
3820 3820 return
3821 3821
3822 3822 code.interact(local=imported_objects)
3823 3823
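# Editor's note: example usage (illustrative only). Since ``ui`` and ``repo``
# are available in the interpreter's namespace, a one-liner such as the
# following prints the number of revisions in the repository:
#
#   $ hg debugshell -c 'ui.write(b"%d\n" % len(repo))'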
3824 3824
3825 3825 @command(
3826 3826 b'debug-revlog-stats',
3827 3827 [
3828 3828 (b'c', b'changelog', None, _(b'Display changelog statistics')),
3829 3829 (b'm', b'manifest', None, _(b'Display manifest statistics')),
3830 3830 (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
3831 3831 ]
3832 3832 + cmdutil.formatteropts,
3833 3833 )
3834 3834 def debug_revlog_stats(ui, repo, **opts):
3835 3835 """display statistics about revlogs in the store"""
3836 3836 changelog = opts["changelog"]
3837 3837 manifest = opts["manifest"]
3838 3838 filelogs = opts["filelogs"]
3839 3839
3840 3840 if changelog is None and manifest is None and filelogs is None:
3841 3841 changelog = True
3842 3842 manifest = True
3843 3843 filelogs = True
3844 3844
3845 3845 repo = repo.unfiltered()
3846 3846 fm = ui.formatter(b'debug-revlog-stats', pycompat.byteskwargs(opts))
3847 3847 revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
3848 3848 fm.end()
3849 3849
3850 3850
3851 3851 @command(
3852 3852 b'debugsuccessorssets',
3853 3853 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3854 3854 _(b'[REV]'),
3855 3855 )
3856 3856 def debugsuccessorssets(ui, repo, *revs, **opts):
3857 3857 """show set of successors for revision
3858 3858
3859 3859 A successors set of changeset A is a consistent group of revisions that
3860 3860 succeed A. It contains non-obsolete changesets only unless closests
3861 3861 successors set is set.
3862 3862
3863 3863 In most cases a changeset A has a single successors set containing a single
3864 3864 successor (changeset A replaced by A').
3865 3865
3866 3866 A changeset that is made obsolete with no successors is called "pruned".
3867 3867 Such changesets have no successors sets at all.
3868 3868
3869 3869 A changeset that has been "split" will have a successors set containing
3870 3870 more than one successor.
3871 3871
3872 3872 A changeset that has been rewritten in multiple different ways is called
3873 3873 "divergent". Such changesets have multiple successor sets (each of which
3874 3874 may also be split, i.e. have multiple successors).
3875 3875
3876 3876 Results are displayed as follows::
3877 3877
3878 3878 <rev1>
3879 3879 <successors-1A>
3880 3880 <rev2>
3881 3881 <successors-2A>
3882 3882 <successors-2B1> <successors-2B2> <successors-2B3>
3883 3883
3884 3884 Here rev2 has two possible (i.e. divergent) successors sets. The first
3885 3885 holds one element, whereas the second holds three (i.e. the changeset has
3886 3886 been split).
3887 3887 """
3888 3888 # passed to successorssets caching computation from one call to another
3889 3889 cache = {}
3890 3890 ctx2str = bytes
3891 3891 node2str = short
3892 3892 for rev in logcmdutil.revrange(repo, revs):
3893 3893 ctx = repo[rev]
3894 3894 ui.write(b'%s\n' % ctx2str(ctx))
3895 3895 for succsset in obsutil.successorssets(
3896 3896 repo, ctx.node(), closest=opts['closest'], cache=cache
3897 3897 ):
3898 3898 if succsset:
3899 3899 ui.write(b' ')
3900 3900 ui.write(node2str(succsset[0]))
3901 3901 for node in succsset[1:]:
3902 3902 ui.write(b' ')
3903 3903 ui.write(node2str(node))
3904 3904 ui.write(b'\n')
3905 3905
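# Editor's note: illustrative sketch only (hypothetical helper, not a
# command). It mirrors the loop in debugsuccessorssets above: successors sets
# are computed per node, and the cache dict is shared across calls so repeated
# computation is avoided.
def _example_successors_sets(repo, revs, closest=False):
    cache = {}
    return {
        rev: obsutil.successorssets(
            repo, repo[rev].node(), closest=closest, cache=cache
        )
        for rev in revs
    }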
3906 3906
3907 3907 @command(b'debugtagscache', [])
3908 3908 def debugtagscache(ui, repo):
3909 3909 """display the contents of .hg/cache/hgtagsfnodes1"""
3910 3910 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3911 3911 flog = repo.file(b'.hgtags')
3912 3912 for r in repo:
3913 3913 node = repo[r].node()
3914 3914 tagsnode = cache.getfnode(node, computemissing=False)
3915 3915 if tagsnode:
3916 3916 tagsnodedisplay = hex(tagsnode)
3917 3917 if not flog.hasnode(tagsnode):
3918 3918 tagsnodedisplay += b' (unknown node)'
3919 3919 elif tagsnode is None:
3920 3920 tagsnodedisplay = b'missing'
3921 3921 else:
3922 3922 tagsnodedisplay = b'invalid'
3923 3923
3924 3924 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3925 3925
3926 3926
3927 3927 @command(
3928 3928 b'debugtemplate',
3929 3929 [
3930 3930 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3931 3931 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3932 3932 ],
3933 3933 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3934 3934 optionalrepo=True,
3935 3935 )
3936 3936 def debugtemplate(ui, repo, tmpl, **opts):
3937 3937 """parse and apply a template
3938 3938
3939 3939 If -r/--rev is given, the template is processed as a log template and
3940 3940 applied to the given changesets. Otherwise, it is processed as a generic
3941 3941 template.
3942 3942
3943 3943 Use --verbose to print the parsed tree.
3944 3944 """
3945 3945 revs = None
3946 3946 if opts['rev']:
3947 3947 if repo is None:
3948 3948 raise error.RepoError(
3949 3949 _(b'there is no Mercurial repository here (.hg not found)')
3950 3950 )
3951 3951 revs = logcmdutil.revrange(repo, opts['rev'])
3952 3952
3953 3953 props = {}
3954 3954 for d in opts['define']:
3955 3955 try:
3956 3956 k, v = (e.strip() for e in d.split(b'=', 1))
3957 3957 if not k or k == b'ui':
3958 3958 raise ValueError
3959 3959 props[k] = v
3960 3960 except ValueError:
3961 3961 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3962 3962
3963 3963 if ui.verbose:
3964 3964 aliases = ui.configitems(b'templatealias')
3965 3965 tree = templater.parse(tmpl)
3966 3966 ui.note(templater.prettyformat(tree), b'\n')
3967 3967 newtree = templater.expandaliases(tree, aliases)
3968 3968 if newtree != tree:
3969 3969 ui.notenoi18n(
3970 3970 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3971 3971 )
3972 3972
3973 3973 if revs is None:
3974 3974 tres = formatter.templateresources(ui, repo)
3975 3975 t = formatter.maketemplater(ui, tmpl, resources=tres)
3976 3976 if ui.verbose:
3977 3977 kwds, funcs = t.symbolsuseddefault()
3978 3978 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3979 3979 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3980 3980 ui.write(t.renderdefault(props))
3981 3981 else:
3982 3982 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3983 3983 if ui.verbose:
3984 3984 kwds, funcs = displayer.t.symbolsuseddefault()
3985 3985 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3986 3986 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3987 3987 for r in revs:
3988 3988 displayer.show(repo[r], **pycompat.strkwargs(props))
3989 3989 displayer.close()
3990 3990
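# Editor's note: illustrative sketch only (hypothetical helper, not a
# command). It shows the generic-template path taken by debugtemplate above
# when no revisions are given: build a templater backed by repo resources and
# render the user-supplied properties.
def _example_render_generic_template(ui, repo, tmpl, props):
    tres = formatter.templateresources(ui, repo)
    t = formatter.maketemplater(ui, tmpl, resources=tres)
    return t.renderdefault(props)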
3991 3991
3992 3992 @command(
3993 3993 b'debuguigetpass',
3994 3994 [
3995 3995 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3996 3996 ],
3997 3997 _(b'[-p TEXT]'),
3998 3998 norepo=True,
3999 3999 )
4000 4000 def debuguigetpass(ui, prompt=b''):
4001 4001 """show prompt to type password"""
4002 4002 r = ui.getpass(prompt)
4003 4003 if r is None:
4004 4004 r = b"<default response>"
4005 4005 ui.writenoi18n(b'response: %s\n' % r)
4006 4006
4007 4007
4008 4008 @command(
4009 4009 b'debuguiprompt',
4010 4010 [
4011 4011 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4012 4012 ],
4013 4013 _(b'[-p TEXT]'),
4014 4014 norepo=True,
4015 4015 )
4016 4016 def debuguiprompt(ui, prompt=b''):
4017 4017 """show plain prompt"""
4018 4018 r = ui.prompt(prompt)
4019 4019 ui.writenoi18n(b'response: %s\n' % r)
4020 4020
4021 4021
4022 4022 @command(b'debugupdatecaches', [])
4023 4023 def debugupdatecaches(ui, repo, *pats, **opts):
4024 4024 """warm all known caches in the repository"""
4025 4025 with repo.wlock(), repo.lock():
4026 4026 repo.updatecaches(caches=repository.CACHES_ALL)
4027 4027
4028 4028
4029 4029 @command(
4030 4030 b'debugupgraderepo',
4031 4031 [
4032 4032 (
4033 4033 b'o',
4034 4034 b'optimize',
4035 4035 [],
4036 4036 _(b'extra optimization to perform'),
4037 4037 _(b'NAME'),
4038 4038 ),
4039 4039 (b'', b'run', False, _(b'performs an upgrade')),
4040 4040 (b'', b'backup', True, _(b'keep the old repository content around')),
4041 4041 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4042 4042 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4043 4043 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4044 4044 ],
4045 4045 )
4046 4046 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4047 4047 """upgrade a repository to use different features
4048 4048
4049 4049 If no arguments are specified, the repository is evaluated for upgrade
4050 4050 and a list of problems and potential optimizations is printed.
4051 4051
4052 4052 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4053 4053 can be influenced via additional arguments. More details will be provided
4054 4054 by the command output when run without ``--run``.
4055 4055
4056 4056 During the upgrade, the repository will be locked and no writes will be
4057 4057 allowed.
4058 4058
4059 4059 At the end of the upgrade, the repository may not be readable while new
4060 4060 repository data is swapped in. This window will be as long as it takes to
4061 4061 rename some directories inside the ``.hg`` directory. On most machines, this
4062 4062 should complete almost instantaneously and the chances of a consumer being
4063 4063 unable to access the repository should be low.
4064 4064
4065 4065 By default, all revlogs will be upgraded. You can restrict this using flags
4066 4066 such as `--manifest`:
4067 4067
4068 4068 * `--manifest`: only optimize the manifest
4069 4069 * `--no-manifest`: optimize all revlogs but the manifest
4070 4070 * `--changelog`: optimize the changelog only
4071 4071 * `--no-changelog --no-manifest`: optimize filelogs only
4072 4072 * `--filelogs`: optimize the filelogs only
4073 4073 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4074 4074 """
4075 4075 return upgrade.upgraderepo(
4076 4076 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4077 4077 )
4078 4078
4079 4079
4080 4080 @command(
4081 4081 b'debug::unbundle',
4082 4082 [],
4083 4083 _(b'FILE...'),
4084 4084 helpcategory=command.CATEGORY_IMPORT_EXPORT,
4085 4085 )
4086 4086 def debugunbundle(ui, repo, fname1, *fnames):
4087 4087 """same as `hg unbundle`, but pretent to come from a push
4088 4088
4089 4089 This is useful to debug behavior and performance change in this case.
4090 4090 """
4091 4091 fnames = (fname1,) + fnames
4092 4092 cmdutil.unbundle_files(ui, repo, fnames)
4093 4093
4094 4094
4095 4095 @command(
4096 4096 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4097 4097 )
4098 4098 def debugwalk(ui, repo, *pats, **opts):
4099 4099 """show how files match on given patterns"""
4100 4100 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
4101 4101 if ui.verbose:
4102 4102 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4103 4103 items = list(repo[None].walk(m))
4104 4104 if not items:
4105 4105 return
4106 4106 f = lambda fn: fn
4107 4107 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4108 4108 f = lambda fn: util.normpath(fn)
4109 4109 fmt = b'f %%-%ds %%-%ds %%s' % (
4110 4110 max([len(abs) for abs in items]),
4111 4111 max([len(repo.pathto(abs)) for abs in items]),
4112 4112 )
4113 4113 for abs in items:
4114 4114 line = fmt % (
4115 4115 abs,
4116 4116 f(repo.pathto(abs)),
4117 4117 m.exact(abs) and b'exact' or b'',
4118 4118 )
4119 4119 ui.write(b"%s\n" % line.rstrip())
4120 4120
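# Editor's note: illustrative sketch only (hypothetical helper, not a
# command). It builds a working-copy matcher the same way debugwalk above
# does and returns the matching file names.
def _example_walk_patterns(ui, repo, pats):
    m = scmutil.match(repo[None], pats)
    return list(repo[None].walk(m))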
4121 4121
4122 4122 @command(b'debugwhyunstable', [], _(b'REV'))
4123 4123 def debugwhyunstable(ui, repo, rev):
4124 4124 """explain instabilities of a changeset"""
4125 4125 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4126 4126 dnodes = b''
4127 4127 if entry.get(b'divergentnodes'):
4128 4128 dnodes = (
4129 4129 b' '.join(
4130 4130 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4131 4131 for ctx in entry[b'divergentnodes']
4132 4132 )
4133 4133 + b' '
4134 4134 )
4135 4135 ui.write(
4136 4136 b'%s: %s%s %s\n'
4137 4137 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4138 4138 )
4139 4139
4140 4140
4141 4141 @command(
4142 4142 b'debugwireargs',
4143 4143 [
4144 4144 (b'', b'three', b'', b'three'),
4145 4145 (b'', b'four', b'', b'four'),
4146 4146 (b'', b'five', b'', b'five'),
4147 4147 ]
4148 4148 + cmdutil.remoteopts,
4149 4149 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4150 4150 norepo=True,
4151 4151 )
4152 4152 def debugwireargs(ui, repopath, *vals, **opts):
4153 4153 repo = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
4154 4154 try:
4155 4155 for opt in cmdutil.remoteopts:
4156 4156 del opts[pycompat.sysstr(opt[1])]
4157 4157 args = {}
4158 4158 for k, v in opts.items():
4159 4159 if v:
4160 4160 args[k] = v
4161 4161
4162 4162 # run twice to check that we don't mess up the stream for the next command
4163 4163 res1 = repo.debugwireargs(*vals, **args)
4164 4164 res2 = repo.debugwireargs(*vals, **args)
4165 4165 ui.write(b"%s\n" % res1)
4166 4166 if res1 != res2:
4167 4167 ui.warn(b"%s\n" % res2)
4168 4168 finally:
4169 4169 repo.close()
4170 4170
4171 4171
4172 4172 def _parsewirelangblocks(fh):
4173 4173 activeaction = None
4174 4174 blocklines = []
4175 4175 lastindent = 0
4176 4176
4177 4177 for line in fh:
4178 4178 line = line.rstrip()
4179 4179 if not line:
4180 4180 continue
4181 4181
4182 4182 if line.startswith(b'#'):
4183 4183 continue
4184 4184
4185 4185 if not line.startswith(b' '):
4186 4186 # New block. Flush previous one.
4187 4187 if activeaction:
4188 4188 yield activeaction, blocklines
4189 4189
4190 4190 activeaction = line
4191 4191 blocklines = []
4192 4192 lastindent = 0
4193 4193 continue
4194 4194
4195 4195 # Else we start with an indent.
4196 4196
4197 4197 if not activeaction:
4198 4198 raise error.Abort(_(b'indented line outside of block'))
4199 4199
4200 4200 indent = len(line) - len(line.lstrip())
4201 4201
4202 4202 # If this line is indented more than the last line, concatenate it.
4203 4203 if indent > lastindent and blocklines:
4204 4204 blocklines[-1] += line.lstrip()
4205 4205 else:
4206 4206 blocklines.append(line)
4207 4207 lastindent = indent
4208 4208
4209 4209 # Flush last block.
4210 4210 if activeaction:
4211 4211 yield activeaction, blocklines
4212 4212
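# Editor's note: illustrative sketch only (hypothetical helper, unused). It
# demonstrates the shape of the values yielded by _parsewirelangblocks above:
# one (action, lines) pair per block, with indented lines keeping their
# leading whitespace (consumers below lstrip() them as needed).
def _example_parse_wirelang_blocks():
    import io

    data = io.BytesIO(b'command listkeys\n    namespace bookmarks\nflush\n')
    return list(_parsewirelangblocks(data))
    # -> [(b'command listkeys', [b'    namespace bookmarks']), (b'flush', [])]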
4213 4213
4214 4214 @command(
4215 4215 b'debugwireproto',
4216 4216 [
4217 4217 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4218 4218 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4219 4219 (
4220 4220 b'',
4221 4221 b'noreadstderr',
4222 4222 False,
4223 4223 _(b'do not read from stderr of the remote'),
4224 4224 ),
4225 4225 (
4226 4226 b'',
4227 4227 b'nologhandshake',
4228 4228 False,
4229 4229 _(b'do not log I/O related to the peer handshake'),
4230 4230 ),
4231 4231 ]
4232 4232 + cmdutil.remoteopts,
4233 4233 _(b'[PATH]'),
4234 4234 optionalrepo=True,
4235 4235 )
4236 4236 def debugwireproto(ui, repo, path=None, **opts):
4237 4237 """send wire protocol commands to a server
4238 4238
4239 4239 This command can be used to issue wire protocol commands to remote
4240 4240 peers and to debug the raw data being exchanged.
4241 4241
4242 4242 ``--localssh`` will start an SSH server against the current repository
4243 4243 and connect to that. By default, the connection will perform a handshake
4244 4244 and establish an appropriate peer instance.
4245 4245
4246 4246 ``--peer`` can be used to bypass the handshake protocol and construct a
4247 4247 peer instance using the specified class type. Valid values are ``raw``,
4248 4248 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4249 4249 don't support higher-level command actions.
4250 4250
4251 4251 ``--noreadstderr`` can be used to disable automatic reading from stderr
4252 4252 of the peer (for SSH connections only). Disabling automatic reading of
4253 4253 stderr is useful for making output more deterministic.
4254 4254
4255 4255 Commands are issued via a mini language which is specified via stdin.
4256 4256 The language consists of individual actions to perform. An action is
4257 4257 defined by a block. A block is defined as a line with no leading
4258 4258 space followed by 0 or more lines with leading space. Blocks are
4259 4259 effectively a high-level command with additional metadata.
4260 4260
4261 4261 Lines beginning with ``#`` are ignored.
4262 4262
4263 4263 The following sections denote available actions.
4264 4264
4265 4265 raw
4266 4266 ---
4267 4267
4268 4268 Send raw data to the server.
4269 4269
4270 4270 The block payload contains the raw data to send as one atomic send
4271 4271 operation. The data may not actually be delivered in a single system
4272 4272 call: it depends on the abilities of the transport being used.
4273 4273
4274 4274 Each line in the block is de-indented and concatenated. Then, that
4275 4275 value is evaluated as a Python b'' literal. This allows the use of
4276 4276 backslash escaping, etc.
4277 4277
4278 4278 raw+
4279 4279 ----
4280 4280
4281 4281 Behaves like ``raw`` except flushes output afterwards.
4282 4282
4283 4283 command <X>
4284 4284 -----------
4285 4285
4286 4286 Send a request to run a named command, whose name follows the ``command``
4287 4287 string.
4288 4288
4289 4289 Arguments to the command are defined as lines in this block. The format of
4290 4290 each line is ``<key> <value>``. e.g.::
4291 4291
4292 4292 command listkeys
4293 4293 namespace bookmarks
4294 4294
4295 4295 If the value begins with ``eval:``, it will be interpreted as a Python
4296 4296 literal expression. Otherwise values are interpreted as Python b'' literals.
4297 4297 This allows sending complex types and encoding special byte sequences via
4298 4298 backslash escaping.
4299 4299
4300 4300 The following arguments have special meaning:
4301 4301
4302 4302 ``PUSHFILE``
4303 4303 When defined, the *push* mechanism of the peer will be used instead
4304 4304 of the static request-response mechanism and the content of the
4305 4305 file specified in the value of this argument will be sent as the
4306 4306 command payload.
4307 4307
4308 4308 This can be used to submit a local bundle file to the remote.
4309 4309
4310 4310 batchbegin
4311 4311 ----------
4312 4312
4313 4313 Instruct the peer to begin a batched send.
4314 4314
4315 4315 All ``command`` blocks are queued for execution until the next
4316 4316 ``batchsubmit`` block.
4317 4317
4318 4318 batchsubmit
4319 4319 -----------
4320 4320
4321 4321 Submit previously queued ``command`` blocks as a batch request.
4322 4322
4323 4323 This action MUST be paired with a ``batchbegin`` action.
4324 4324
4325 4325 httprequest <method> <path>
4326 4326 ---------------------------
4327 4327
4328 4328 (HTTP peer only)
4329 4329
4330 4330 Send an HTTP request to the peer.
4331 4331
4332 4332 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4333 4333
4334 4334 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4335 4335 headers to add to the request. e.g. ``Accept: foo``.
4336 4336
4337 4337 The following arguments are special:
4338 4338
4339 4339 ``BODYFILE``
4340 4340 The content of the file defined as the value to this argument will be
4341 4341 transferred verbatim as the HTTP request body.
4342 4342
4343 4343 ``frame <type> <flags> <payload>``
4344 4344 Send a unified protocol frame as part of the request body.
4345 4345
4346 4346 All frames will be collected and sent as the body to the HTTP
4347 4347 request.
4348 4348
4349 4349 close
4350 4350 -----
4351 4351
4352 4352 Close the connection to the server.
4353 4353
4354 4354 flush
4355 4355 -----
4356 4356
4357 4357 Flush data written to the server.
4358 4358
4359 4359 readavailable
4360 4360 -------------
4361 4361
4362 4362 Close the write end of the connection and read all available data from
4363 4363 the server.
4364 4364
4365 4365 If the connection to the server encompasses multiple pipes, we poll both
4366 4366 pipes and read available data.
4367 4367
4368 4368 readline
4369 4369 --------
4370 4370
4371 4371 Read a line of output from the server. If there are multiple output
4372 4372 pipes, reads only the main pipe.
4373 4373
4374 4374 ereadline
4375 4375 ---------
4376 4376
4377 4377 Like ``readline``, but read from the stderr pipe, if available.
4378 4378
4379 4379 read <X>
4380 4380 --------
4381 4381
4382 4382 ``read()`` N bytes from the server's main output pipe.
4383 4383
4384 4384 eread <X>
4385 4385 ---------
4386 4386
4387 4387 ``read()`` N bytes from the server's stderr pipe, if available.
4388 4388
4389 4389 Specifying Unified Frame-Based Protocol Frames
4390 4390 ----------------------------------------------
4391 4391
4392 4392 It is possible to emit a *Unified Frame-Based Protocol* by using special
4393 4393 syntax.
4394 4394
4395 4395 A frame is composed of a type, flags, and a payload. These can be parsed
4396 4396 from a string of the form:
4397 4397
4398 4398 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4399 4399
4400 4400 ``request-id`` and ``stream-id`` are integers defining the request and
4401 4401 stream identifiers.
4402 4402
4403 4403 ``type`` can be an integer value for the frame type or the string name
4404 4404 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4405 4405 ``command-name``.
4406 4406
4407 4407 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4408 4408 components. Each component (and there can be just one) can be an integer
4409 4409 or a flag name for stream flags or frame flags, respectively. Values are
4410 4410 resolved to integers and then bitwise OR'd together.
4411 4411
4412 4412 ``payload`` represents the raw frame payload. If it begins with
4413 4413 ``cbor:``, the following string is evaluated as Python code and the
4414 4414 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4415 4415 as a Python byte string literal.
4416 4416 """
4417 4417 if opts['localssh'] and not repo:
4418 4418 raise error.Abort(_(b'--localssh requires a repository'))
4419 4419
4420 4420 if opts['peer'] and opts['peer'] not in (
4421 4421 b'raw',
4422 4422 b'ssh1',
4423 4423 ):
4424 4424 raise error.Abort(
4425 4425 _(b'invalid value for --peer'),
4426 4426 hint=_(b'valid values are "raw" and "ssh1"'),
4427 4427 )
4428 4428
4429 4429 if path and opts['localssh']:
4430 4430 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4431 4431
4432 4432 if ui.interactive():
4433 4433 ui.write(_(b'(waiting for commands on stdin)\n'))
4434 4434
4435 4435 blocks = list(_parsewirelangblocks(ui.fin))
4436 4436
4437 4437 proc = None
4438 4438 stdin = None
4439 4439 stdout = None
4440 4440 stderr = None
4441 4441 opener = None
4442 4442
4443 4443 if opts['localssh']:
4444 4444 # We start the SSH server in its own process so there is process
4445 4445 # separation. This prevents a whole class of potential bugs around
4446 4446 # shared state from interfering with server operation.
4447 4447 args = procutil.hgcmd() + [
4448 4448 b'-R',
4449 4449 repo.root,
4450 4450 b'debugserve',
4451 4451 b'--sshstdio',
4452 4452 ]
4453 4453 proc = subprocess.Popen(
4454 4454 pycompat.rapply(procutil.tonativestr, args),
4455 4455 stdin=subprocess.PIPE,
4456 4456 stdout=subprocess.PIPE,
4457 4457 stderr=subprocess.PIPE,
4458 4458 bufsize=0,
4459 4459 )
4460 4460
4461 4461 stdin = proc.stdin
4462 4462 stdout = proc.stdout
4463 4463 stderr = proc.stderr
4464 4464
4465 4465 # We turn the pipes into observers so we can log I/O.
4466 4466 if ui.verbose or opts['peer'] == b'raw':
4467 4467 stdin = util.makeloggingfileobject(
4468 4468 ui, proc.stdin, b'i', logdata=True
4469 4469 )
4470 4470 stdout = util.makeloggingfileobject(
4471 4471 ui, proc.stdout, b'o', logdata=True
4472 4472 )
4473 4473 stderr = util.makeloggingfileobject(
4474 4474 ui, proc.stderr, b'e', logdata=True
4475 4475 )
4476 4476
4477 4477 # --localssh also implies the peer connection settings.
4478 4478
4479 4479 url = b'ssh://localserver'
4480 4480 autoreadstderr = not opts['noreadstderr']
4481 4481
4482 4482 if opts['peer'] == b'ssh1':
4483 4483 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4484 4484 peer = sshpeer.sshv1peer(
4485 4485 ui,
4486 4486 url,
4487 4487 proc,
4488 4488 stdin,
4489 4489 stdout,
4490 4490 stderr,
4491 4491 None,
4492 4492 autoreadstderr=autoreadstderr,
4493 4493 )
4494 4494 elif opts['peer'] == b'raw':
4495 4495 ui.write(_(b'using raw connection to peer\n'))
4496 4496 peer = None
4497 4497 else:
4498 4498 ui.write(_(b'creating ssh peer from handshake results\n'))
4499 4499 peer = sshpeer._make_peer(
4500 4500 ui,
4501 4501 url,
4502 4502 proc,
4503 4503 stdin,
4504 4504 stdout,
4505 4505 stderr,
4506 4506 autoreadstderr=autoreadstderr,
4507 4507 )
4508 4508
4509 4509 elif path:
4510 4510 # We bypass hg.peer() so we can proxy the sockets.
4511 4511 # TODO consider not doing this because we skip
4512 4512 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4513 4513 u = urlutil.url(path)
4514 4514 if u.scheme not in (b'http', b'https'):
4515 4515 raise error.Abort(
4516 4516 _(b'only http:// and https:// paths are currently supported')
4517 4517 )
4518 4518
4519 4519 url, authinfo = u.authinfo()
4520 4520 openerargs = {
4521 4521 'useragent': b'Mercurial debugwireproto',
4522 4522 }
4523 4523
4524 4524 # Turn pipes/sockets into observers so we can log I/O.
4525 4525 if ui.verbose:
4526 4526 openerargs.update(
4527 4527 {
4528 4528 'loggingfh': ui,
4529 4529 'loggingname': b's',
4530 4530 'loggingopts': {
4531 4531 'logdata': True,
4532 4532 'logdataapis': False,
4533 4533 },
4534 4534 }
4535 4535 )
4536 4536
4537 4537 if ui.debugflag:
4538 4538 openerargs['loggingopts']['logdataapis'] = True
4539 4539
4540 4540 # Don't send default headers when in raw mode. This allows us to
4541 4541 # bypass most of the behavior of our URL handling code so we can
4542 4542 # have near complete control over what's sent on the wire.
4543 4543 if opts['peer'] == b'raw':
4544 4544 openerargs['sendaccept'] = False
4545 4545
4546 4546 opener = urlmod.opener(ui, authinfo, **openerargs)
4547 4547
4548 4548 if opts['peer'] == b'raw':
4549 4549 ui.write(_(b'using raw connection to peer\n'))
4550 4550 peer = None
4551 4551 elif opts['peer']:
4552 4552 raise error.Abort(
4553 4553 _(b'--peer %s not supported with HTTP peers') % opts['peer']
4554 4554 )
4555 4555 else:
4556 4556 peer_path = urlutil.try_path(ui, path)
4557 4557 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4558 4558
4559 4559 # We /could/ populate stdin/stdout with sock.makefile()...
4560 4560 else:
4561 4561 raise error.Abort(_(b'unsupported connection configuration'))
4562 4562
4563 4563 batchedcommands = None
4564 4564
4565 4565 # Now perform actions based on the parsed wire language instructions.
4566 4566 for action, lines in blocks:
4567 4567 if action in (b'raw', b'raw+'):
4568 4568 if not stdin:
4569 4569 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4570 4570
4571 4571 # Concatenate the data together.
4572 4572 data = b''.join(l.lstrip() for l in lines)
4573 4573 data = stringutil.unescapestr(data)
4574 4574 stdin.write(data)
4575 4575
4576 4576 if action == b'raw+':
4577 4577 stdin.flush()
4578 4578 elif action == b'flush':
4579 4579 if not stdin:
4580 4580 raise error.Abort(_(b'cannot call flush on this peer'))
4581 4581 stdin.flush()
4582 4582 elif action.startswith(b'command'):
4583 4583 if not peer:
4584 4584 raise error.Abort(
4585 4585 _(
4586 4586 b'cannot send commands unless peer instance '
4587 4587 b'is available'
4588 4588 )
4589 4589 )
4590 4590
4591 4591 command = action.split(b' ', 1)[1]
4592 4592
4593 4593 args = {}
4594 4594 for line in lines:
4595 4595 # We need to allow empty values.
4596 4596 fields = line.lstrip().split(b' ', 1)
4597 4597 if len(fields) == 1:
4598 4598 key = fields[0]
4599 4599 value = b''
4600 4600 else:
4601 4601 key, value = fields
4602 4602
4603 4603 if value.startswith(b'eval:'):
4604 4604 value = stringutil.evalpythonliteral(value[5:])
4605 4605 else:
4606 4606 value = stringutil.unescapestr(value)
4607 4607
4608 4608 args[key] = value
4609 4609
4610 4610 if batchedcommands is not None:
4611 4611 batchedcommands.append((command, args))
4612 4612 continue
4613 4613
4614 4614 ui.status(_(b'sending %s command\n') % command)
4615 4615
4616 4616 if b'PUSHFILE' in args:
4617 4617 with open(args[b'PUSHFILE'], 'rb') as fh:
4618 4618 del args[b'PUSHFILE']
4619 4619 res, output = peer._callpush(
4620 4620 command, fh, **pycompat.strkwargs(args)
4621 4621 )
4622 4622 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4623 4623 ui.status(
4624 4624 _(b'remote output: %s\n') % stringutil.escapestr(output)
4625 4625 )
4626 4626 else:
4627 4627 with peer.commandexecutor() as e:
4628 4628 res = e.callcommand(command, args).result()
4629 4629
4630 4630 ui.status(
4631 4631 _(b'response: %s\n')
4632 4632 % stringutil.pprint(res, bprefix=True, indent=2)
4633 4633 )
4634 4634
4635 4635 elif action == b'batchbegin':
4636 4636 if batchedcommands is not None:
4637 4637 raise error.Abort(_(b'nested batchbegin not allowed'))
4638 4638
4639 4639 batchedcommands = []
4640 4640 elif action == b'batchsubmit':
4641 4641 # There is a batching API we could go through. But it would be
4642 4642 # difficult to normalize requests into function calls. It is easier
4643 4643 # to bypass this layer and normalize to commands + args.
4644 4644 ui.status(
4645 4645 _(b'sending batch with %d sub-commands\n')
4646 4646 % len(batchedcommands)
4647 4647 )
4648 4648 assert peer is not None
4649 4649 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4650 4650 ui.status(
4651 4651 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4652 4652 )
4653 4653
4654 4654 batchedcommands = None
4655 4655
4656 4656 elif action.startswith(b'httprequest '):
4657 4657 if not opener:
4658 4658 raise error.Abort(
4659 4659 _(b'cannot use httprequest without an HTTP peer')
4660 4660 )
4661 4661
4662 4662 request = action.split(b' ', 2)
4663 4663 if len(request) != 3:
4664 4664 raise error.Abort(
4665 4665 _(
4666 4666 b'invalid httprequest: expected format is '
4667 4667 b'"httprequest <method> <path>'
4668 4668 )
4669 4669 )
4670 4670
4671 4671 method, httppath = request[1:]
4672 4672 headers = {}
4673 4673 body = None
4674 4674 frames = []
4675 4675 for line in lines:
4676 4676 line = line.lstrip()
4677 4677 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4678 4678 if m:
4679 4679 # Headers need to use native strings.
4680 4680 key = pycompat.strurl(m.group(1))
4681 4681 value = pycompat.strurl(m.group(2))
4682 4682 headers[key] = value
4683 4683 continue
4684 4684
4685 4685 if line.startswith(b'BODYFILE '):
4686 4686 with open(line.split(b' ', 1)[1], b'rb') as fh:
4687 4687 body = fh.read()
4688 4688 elif line.startswith(b'frame '):
4689 4689 frame = wireprotoframing.makeframefromhumanstring(
4690 4690 line[len(b'frame ') :]
4691 4691 )
4692 4692
4693 4693 frames.append(frame)
4694 4694 else:
4695 4695 raise error.Abort(
4696 4696 _(b'unknown argument to httprequest: %s') % line
4697 4697 )
4698 4698
4699 4699 url = path + httppath
4700 4700
4701 4701 if frames:
4702 4702 body = b''.join(bytes(f) for f in frames)
4703 4703
4704 4704 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4705 4705
4706 4706 # urllib.Request insists on using has_data() as a proxy for
4707 4707 # determining the request method. Override that to use our
4708 4708 # explicitly requested method.
4709 4709 req.get_method = lambda: pycompat.sysstr(method)
4710 4710
4711 4711 try:
4712 4712 res = opener.open(req)
4713 4713 body = res.read()
4714 4714 except util.urlerr.urlerror as e:
4715 4715 # read() method must be called, but only exists in Python 2
4716 4716 getattr(e, 'read', lambda: None)()
4717 4717 continue
4718 4718
4719 4719 ct = res.headers.get('Content-Type')
4720 4720 if ct == 'application/mercurial-cbor':
4721 4721 ui.write(
4722 4722 _(b'cbor> %s\n')
4723 4723 % stringutil.pprint(
4724 4724 cborutil.decodeall(body), bprefix=True, indent=2
4725 4725 )
4726 4726 )
4727 4727
4728 4728 elif action == b'close':
4729 4729 assert peer is not None
4730 4730 peer.close()
4731 4731 elif action == b'readavailable':
4732 4732 if not stdout or not stderr:
4733 4733 raise error.Abort(
4734 4734 _(b'readavailable not available on this peer')
4735 4735 )
4736 4736
4737 4737 stdin.close()
4738 4738 stdout.read()
4739 4739 stderr.read()
4740 4740
4741 4741 elif action == b'readline':
4742 4742 if not stdout:
4743 4743 raise error.Abort(_(b'readline not available on this peer'))
4744 4744 stdout.readline()
4745 4745 elif action == b'ereadline':
4746 4746 if not stderr:
4747 4747 raise error.Abort(_(b'ereadline not available on this peer'))
4748 4748 stderr.readline()
4749 4749 elif action.startswith(b'read '):
4750 4750 count = int(action.split(b' ', 1)[1])
4751 4751 if not stdout:
4752 4752 raise error.Abort(_(b'read not available on this peer'))
4753 4753 stdout.read(count)
4754 4754 elif action.startswith(b'eread '):
4755 4755 count = int(action.split(b' ', 1)[1])
4756 4756 if not stderr:
4757 4757 raise error.Abort(_(b'eread not available on this peer'))
4758 4758 stderr.read(count)
4759 4759 else:
4760 4760 raise error.Abort(_(b'unknown action: %s') % action)
4761 4761
4762 4762 if batchedcommands is not None:
4763 4763 raise error.Abort(_(b'unclosed "batchbegin" request'))
4764 4764
4765 4765 if peer:
4766 4766 peer.close()
4767 4767
4768 4768 if proc:
4769 4769 proc.kill()