##// END OF EJS Templates
debug-discovery: deal with case where common is empty...
marmoute -
r50299:c6aac500 stable
parent child Browse files
Show More
@@ -1,5056 +1,5058 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revlogutils,
77 77 revset,
78 78 revsetlang,
79 79 scmutil,
80 80 setdiscovery,
81 81 simplemerge,
82 82 sshpeer,
83 83 sslutil,
84 84 streamclone,
85 85 strip,
86 86 tags as tagsmod,
87 87 templater,
88 88 treediscovery,
89 89 upgrade,
90 90 url as urlmod,
91 91 util,
92 92 vfs as vfsmod,
93 93 wireprotoframing,
94 94 wireprotoserver,
95 95 )
96 96 from .interfaces import repository
97 97 from .utils import (
98 98 cborutil,
99 99 compression,
100 100 dateutil,
101 101 procutil,
102 102 stringutil,
103 103 urlutil,
104 104 )
105 105
106 106 from .revlogutils import (
107 107 constants as revlog_constants,
108 108 debug as revlog_debug,
109 109 deltas as deltautil,
110 110 nodemap,
111 111 rewrite,
112 112 sidedata,
113 113 )
114 114
115 115 release = lockmod.release
116 116
117 117 table = {}
118 118 table.update(strip.command._table)
119 119 command = registrar.command(table)
120 120
121 121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # an explicit index file was given: open it as a standalone revlog
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        # no index file: fall back to the changelog of the current repo
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rl.rev(ancestor), hex(ancestor)))
141 141
142 142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Write the (harmless) EICAR test string into the repo's cache area;
    # a resident AV scanner is expected to flag/quarantine it.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    # Clean up; if an AV engine already removed the file this may warn.
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
158 158
159 159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
166 166
167 167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, used only for the
    # progress bar total and the mergeable-file line count)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # second parse pass: actually create the commits under the locks and a
    # single transaction
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the last node created (-1 = none yet)
        atbranch = b'default'
        nodeids = []  # maps DAG id -> commit node, filled in order
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the file content of
                        # both parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # each rev stamps its own line, keeping merges mergeable
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # carry over the second parent's per-rev files so the
                        # merge does not delete them
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                # closure over filecontent for the memctx file callback
                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag element: remembered and written at the end
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
351 351
352 352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the content of changegroup unbundler ``gen`` to ``ui``.

    With ``all`` set, every delta chunk of the changelog, manifest and each
    filelog is listed with its parents, linked cset and delta base;
    otherwise only the changelog node hashes are printed.  ``indent``
    prefixes each output line (used when the changegroup is nested inside a
    bundle2 part).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # consume and print all delta chunks of the current section
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iterate filelog sections until filelogheader() returns the
        # empty-dict sentinel marking the end of the stream
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
392 392
393 393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # still report something useful for markers we cannot decode
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        # sorted() gives a stable output order regardless of on-disk order
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
416 416
417 417
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    prefix = b' ' * indent
    decoded = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in decoded[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
426 426
427 427
def _quasirepr(thing):
    """Return a bytes repr of *thing*, with mappings rendered key-sorted."""
    maplike = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, maplike):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return b'{%s}' % b', '.join(pairs)
434 434
435 435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    headerfmt = b'%s -- %s (mandatory: %r)\n'
    for part in gen.iterparts():
        if wanted and part.type not in wanted:
            continue
        header = headerfmt % (part.type, _quasirepr(part.params), part.mandatory)
        ui.write((header))
        if part.type == b'changegroup':
            # the changegroup is unbundled even in quiet mode
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458 458
459 459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec, do not unpack the content
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482 482
483 483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # query first so a failing peer aborts before any output
        caps = sorted(peer.capabilities())
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in caps:
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        peer.close()
503 503
504 504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        # no sidedata stored for this revision and --compute not requested
        return

    template = b"%-8s %2s: %s, %s;\n"
    # checked in order: the first matching category labels the file
    categories = [
        (b"added", files.added),
        (b"removed", files.removed),
        (b"merged", files.merged),
        (b"salvaged", files.salvaged),
    ]
    for f in sorted(files.touched):
        for label, members in categories:
            if f in members:
                action = label
                break
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
554 554
555 555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    errcount = 0
    for err in repo.dirstate.verify(m1, m2):
        # each err is (format-string, arg, arg, ...)
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
569 569
570 570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
583 583
584 584
def _debugdisplaycolor(ui):
    """print every color/effect name the current color mode can render"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, _value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    def sortkey(item):
        # sort names containing '_' after the others so that the
        # '_background' entries are grouped at the end
        name, label = item
        return (b'_' in name, name, label)

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601 601
602 602
def _debugdisplaystyle(ui):
    """print each configured style label together with its effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad every label to the widest one so the effect columns line up
    colwidth = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * max(0, colwidth - len(name)))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616 616
617 617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # named 'reqs' (not 'requirements') so we do not shadow the
    # `requirements` module imported at the top of this file
    reqs, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
639 639
640 640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone index file given: read its DAG directly
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # emit 'n' (node) events, plus an 'l' (label) event for each
            # rev the user asked to have labeled
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names, for 'l' events below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event on branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
710 710
711 711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    wholestore = any(
        opts.get(k) for k in (b'changelog', b'manifest', b'dir')
    )
    if wholestore:
        # with -c/-m/--dir the sole positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 727
728 728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747 747
748 748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``p1``:        parent 1 revision number (for reference)
    :``p2``:        parent 2 revision number (for reference)
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base:  a full snapshot
                    - snap:  an intermediate snapshot
                    - p1:    a delta against the first parent
                    - p2:    a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev:  a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # gather per-revision delta statistics from the index entry
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # classify the delta base; order matters, most specific first
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without generaldelta the base is either the rev itself or prev
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev      p1      p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'  readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # number chains sequentially by their (unique) base revision
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: how many hunks, how many bytes
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
989 989
990 990
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # with a single argument it is the revision; with two, FILE then REV
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # local renamed from 'revlog' so it no longer shadows the `revlog`
    # module imported at the top of this file.
    # NOTE(review): the command name passed here is b'debugdeltachain'
    # (affects error messages only) — looks like a copy-paste leftover,
    # kept as-is to preserve behavior.
    rl = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)

    # replay the delta search with verbose debug output going to the ui
    deltacomputer = deltautil.deltacomputer(
        rl,
        write_debug=ui.write,
        debug_search=True,
    )

    node = rl.node(rev)
    p1r, p2r = rl.parentrevs(rev)
    p1 = rl.node(p1r)
    p2 = rl.node(p2r)
    btext = [rl.revision(rev)]
    textlen = len(btext[0])
    cachedelta = None  # force a fresh search, no cached delta reuse
    flags = rl.flags(rev)

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    fh = rl._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1048 1048
1049 1049
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 docket (metadata file) instead of entries
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) hides mtimes even when --dates is left at its
    # default of True
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort key: (mtime, filename)
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # -1 is the "mtime unset" sentinel
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set in the recorded mode: display as 'lnk'
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    # copy records are listed after the entries themselves
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1137 1137
1138 1138
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # the ignore-pattern hash is stored as the tail of the tree metadata
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1153 1153
1154 1154
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is meant
      for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual (or configured) remote peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: use a filtered view of the local repository as
        # the "remote" side of the discovery
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same filtering trick applied to the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # old-style (tree) discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

                # prune "common" down to its heads unless --nonheads was given
                clnode = repo.changelog.node
                common = repo.revs(b'heads(::%ln)', common)
                common = {clnode(r) for r in common}
            return common, hds

    else:
        # new-style (set) discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:

        @contextlib.contextmanager
        def may_capture_output():
            # machine-readable output: keep the discovery chatter out of the
            # stream and stash it in the `output` field instead
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        # a common set containing only the null id means nothing is actually
        # common; normalize to an empty set so the statistics below are right
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # common and missing must partition the repository exactly
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1434 1436
1435 1437
# 4 KiB chunk/buffer size used by `debugdownload` below
_chunksize = 4 << 10
1437 1439
1438 1440
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The body is streamed to standard output, or to OUTPUT when -o/--output
    is given.
    """
    src = urlmod.open(ui, url, output)

    if output:
        sink = open(output, b"wb", _chunksize)
    else:
        sink = ui
    try:
        while True:
            chunk = src.read(_chunksize)
            if not chunk:
                break
            sink.write(chunk)
    finally:
        # only close what we opened ourselves; `ui` is not ours to close
        if output:
            sink.close()
1461 1463
1462 1464
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen build: modules have no __file__, report the executable
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with compatibility info for this hg version
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1524 1526
1525 1527
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the parse/analyze/optimize pipeline, in application order
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # figure out which stage trees should be printed
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print the files that the fileset actually matches
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1621 1623
1622 1624
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report only detects; it cannot be combined with repairing options
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1695 1697
1696 1698
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: widest variant name, never narrower than the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # produce a left-aligned, fixed-width "%s:" column template
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # strings pass through; truthy/falsy render as yes/no in plain mode
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels that highlight mismatches between repo, config, default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1767 1769
1768 1770
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def _yesno(flag):
        # render a boolean probe result the same way the output always has
        return b'yes' if flag else b'no'

    write = ui.writenoi18n
    write(b'path: %s\n' % path)
    write(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
    write(b'exec: %s\n' % _yesno(util.checkexec(path)))
    write(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    write(b'symlink: %s\n' % _yesno(util.checklink(path)))
    write(b'hardlink: %s\n' % _yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = _yesno(util.fscasesensitive(f.name))
    except OSError:
        # probing can fail (e.g. unwritable directory); keep '(unknown)'
        pass
    write(b'case-sensitive: %s\n' % casesensitive)
1791 1793
1792 1794
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # translate the command-line hex IDs into binary nodes for the wire call
    # TODO: get desired bundlecaps from command line.
    getbundle_args = {'bundlecaps': None}
    if common:
        getbundle_args['common'] = [bin(s) for s in common]
    if head:
        getbundle_args['heads'] = [bin(s) for s in head]
    bundle = peer.getbundle(b'debug', **getbundle_args)

    # map the user-facing compression name onto an on-disk bundle header
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1839 1841
1840 1842
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # the file itself matches an ignore rule
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether one of its parent directories
                    # is ignored, which also makes the file ignored
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1889 1891
1890 1892
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    byteopts = pycompat.byteskwargs(opts)
    storage = cmdutil.openstorage(repo, b'debugindex', file_, byteopts)
    formatter = ui.formatter(b'debugindex', byteopts)
    # unwrap the underlying revlog when the storage object wraps one
    target = getattr(storage, b'_revlog', storage)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=formatter,
        revlog=target,
        full_node=ui.debugflag,
    )
1912 1914
1913 1915
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    byteopts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openstorage(repo, b'debugindexdot', file_, byteopts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in rlog:
        # emit one edge per parent; a null second parent is omitted
        p1, p2 = rlog.parents(rlog.node(rev))
        ui.write(b"\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write(b"}\n")
1932 1934
1933 1935
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # touch the index so its internal statistics get populated
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1943 1945
1944 1946
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # running count of detected problems; also the exit code
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer) builds have no os.__file__; report the binary
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # verify the native extensions actually import under this policy
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is reused below as the "templates are fine" flag
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # the editor setting may include arguments; only check the executable
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        # a missing default 'vi' is only a warning, anything else counts
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions run their own installation checks, if they define any
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2243 2245
2244 2246
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    byteopts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, byteopts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    known = peer.known([bin(s) for s in ids])
    # one '1'/'0' per queried id, in query order
    ui.write(b"%s\n" % b"".join(b"1" if f else b"0" for f in known))
2258 2260
2259 2261
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias kept only for old completion scripts
    return debugnamecomplete(ui, repo, *args)
2264 2266
2265 2267
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # forcibly remove lock files on disk, bypassing any sanity checks
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        # acquire the requested locks non-blockingly (False = don't wait)
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # hold the locks until the user (or a signal) tells us to stop
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # always release whatever we managed to acquire
        release(*locks)

    # no --set/--force option: report current lock state instead
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody else held it; drop it right away
            l.release()
        else:
            # somebody holds the lock: report owner, process and age
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # lock file vanished between the probe and the stat
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2388 2390
2389 2391
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # fetch the fulltext cache off the root manifest storage; not every
        # storage implementation has one
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # default action: report cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2463 2465
2464 2466
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default template mirrors the structured output built below
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged, with optional user-supplied labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records; fields depend on the record type
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras for files that carry extras but have no merge record
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2572 2574
2573 2575
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # branches need special handling: historically only *open* branches
    # were listed, so skip the generic namespace here and add open
    # branches explicitly below
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)
    prefixes = args if args else [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(c for c in candidates if c.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2596 2598
2597 2599
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # serialize a fresh nodemap from the in-memory index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # rust/native index can produce the persistent form directly
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # dump the raw bytes currently persisted on disk (if any)
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # validate the persisted data against the current index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # show the docket (header) information of the persisted nodemap
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2659 2661
2660 2662
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # parse a full hex node id; raises InputError on anything else
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # --delete mode: remove markers by their numeric indices
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker precursor -> successors
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # parents can only be recorded for locally-known nodes
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # indices are positions in the *full* marker list, so iterate
            # everything and filter for display
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2810 2812
2811 2813
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # entries map destination -> source; print as "source -> destination"
    for destination, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, destination))
2824 2826
2825 2827
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # entries map destination -> source; print as "source -> destination"
    for destination, source in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (source, destination))
2838 2840
2839 2841
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs): completions for `path` among dirstate
        # entries whose state letter is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # specs outside the repository cannot be completed
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # make spec relative to the repo root, like dirstate paths
        spec = spec[len(rootdir) :]
        # dirstate always uses '/'; convert OS separators for matching,
        # and back again for the results below
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator:
                # a deeper match completes only to its directory
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # translate the option flags into dirstate state letters;
    # no flags at all means "accept everything" (b'nmar' below)
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2908 2910
2909 2911
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, build a matcher from any file patterns,
    # then emit each detected copy as "source -> destination" in sorted
    # destination order.
    startctx = scmutil.revsingle(repo, rev1)
    endctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(startctx, pats, opts)
    copymap = copies.pathcopies(startctx, endctx, matcher)
    for destination, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2923 2925
2924 2926
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        # peer.local() returns an object for local peers, None otherwise
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
        )
    finally:
        # always release the peer connection, even if querying it failed
        peer.close()
2948 2950
2949 2951
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool takes precedence over everything else; route it through
    # the ui.forcemerge override, exactly like a real merge would
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # advertise (with -v) the inputs that influence tool selection
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # suppress _picktool's own chatter unless --debug is set
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3034 3036
3035 3037
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # write path: conditionally update KEY from OLD to NEW over
            # the wire protocol's pushkey command
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # exit status 0 when the update succeeded (r is true)
            return not r
        else:
            # read path: dump every key/value pair in the namespace
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
3071 3073
3072 3074
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors (pvec) of two revisions

    Prints both vectors, their depths, and their relation: ``=`` equal,
    ``>``/``<`` ordered, ``|`` related but unordered, ``?`` when no
    relation could be established.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously no branch matched here, leaving `rel` unbound and
        # crashing with UnboundLocalError in the final ui.write below.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3099 3101
3100 3102
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None tells rebuild() to reset every file
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            # restrict the rebuild to files present on exactly one side
            # (manifest-only, or dirstate-only and not marked added)
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3148 3150
3149 3151
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # The heavy lifting lives in repair.rebuildfncache(); the only knob
    # restricts the scan to .d data files.
    byteopts = pycompat.byteskwargs(opts)
    only_data = byteopts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3166 3168
3167 3169
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""
    # For each matched file in the requested revision, report where its
    # filelog says it was renamed from, if anywhere.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renameinfo = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renameinfo:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, renameinfo[0], hex(renameinfo[1]))
            )
3187 3189
3188 3190
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement per line, sorted so the output is stable
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3194 3196
3195 3197
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump mode: print one raw table row per revision and return early
    if opts.get(b"dump"):
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in range(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a revision stops being a head once it appears as a parent
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # r.end(rev) can be 0 (e.g. only empty revisions so far)
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # decode the revlog's format version and feature flags
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    # fold `size` into the [min, max, total] accumulator `l`
    def addsize(size, l):
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # single pass over all revisions, classifying each one's delta base
    numrevs = len(r)
    for rev in range(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # no delta parent: full snapshot (or empty text)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # intermediate snapshot at some depth > 0
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # regular delta: classify its base vs prev/p1/p2
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the stored chunk encodes its compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # turn the accumulated totals into averages, guarding empty categories
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # helpers building right-aligned numeric / percentage format strings
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    # render a chunk-type label; printable types get shown literally
    def fmtchunktype(chunktype):
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    # per-revision size statistics are only meaningful for format > 0
    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3551 3553
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full node hashes with --debug, short node ids otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # print the column header matching the format/verbosity combination
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # one row per revision; format 0 shows parents as nodes,
    # format 1 shows parents as revision numbers plus flags
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents when lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3665 3667
3666 3668
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # the parse/rewrite pipeline, in the order the stages are applied
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # decide which stage trees to print: showalways unconditionally,
    # showchanged only when the stage changed the tree
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, keeping every intermediate tree for later use
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and the optimized trees and show a
        # unified-style diff of the resulting revision lists
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3798 3800
3799 3801
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    log_fh = None
    if opts[b'logiofd']:
        fd = int(opts[b'logiofd'])
        # Line buffering in binary mode isn't supported and emits a
        # warning on Python 3.8+, so run fully unbuffered instead. This
        # isn't performance critical code, so the cost is acceptable.
        try:
            log_fh = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # A pipe can't seek, so 'ab' mode fails on py3; fall back to
            # plain write mode.
            log_fh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        log_fh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=log_fh)
    server.serve_forever()
3848 3850
3849 3851
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions to nodes; rev2 defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3877 3879
3878 3880
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the sole positional argument is the revision;
    # otherwise both FILE and REV are required.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Report this command's own name (the previous b'debugdata'
            # was a copy-paste leftover from the debugdata command).
            raise error.CommandError(
                b'debugsidedata', _(b'invalid arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap to the underlying revlog when the storage object wraps one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3905 3907
3906 3908
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Figure out the remote address, defaulting the port by scheme.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated since Python 3.7 and removed in
    # 3.12; build an equivalent client context instead. Certificate
    # verification is deliberately disabled because we only want the
    # peer's raw certificate to hand to the Windows API.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3978 3980
3979 3981
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip backup bundle, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Render up to `limit` changesets from chlist, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the node is already present locally.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Suppress the peer's own chatter while extracting the changes.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # The requested changeset was recovered; stop
                        # scanning further backups.
                        break
            else:
                # Listing mode: print the bundle's mtime header, then
                # either the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
4120 4122
4121 4123
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print each subrepository's path, source and pinned revision for the
    # requested changeset (working directory parent by default).
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4133 4135
4134 4136
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose the active ui and repo objects in the interpreter namespace.
    local_ns = {
        'ui': ui,
        'repo': repo,
    }

    code.interact(local=local_ns)
4150 4152
4151 4153
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls to avoid recomputation.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # Each non-empty set is printed as one indented line of
            # space-separated short hashes; a pruned set prints blank.
            if succsset:
                ui.write(b' ')
                ui.write(b' '.join(short(n) for n in succsset))
            ui.write(b'\n')
4206 4208
4207 4209
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')

    def describe(fnode):
        # A truthy fnode is shown in hex, flagged when the .hgtags
        # filelog doesn't know it; None means no cache entry; anything
        # else is a bogus cache record.
        if fnode:
            display = hex(fnode)
            if not flog.hasnode(fnode):
                display += b' (unknown node)'
            return display
        if fnode is None:
            return b'missing'
        return b'invalid'

    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        ui.write(b'%d %s %s\n' % (rev, hex(node), describe(fnode)))
4226 4228
4227 4229
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository even though the command itself
        # does not (optionalrepo=True).
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties;
    # 'ui' is reserved and an empty key is invalid.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree and, if template aliases rewrite it,
        # the expanded tree as well.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the provided properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4291 4293
4292 4294
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() can come back with None; show a placeholder in that case.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4307 4309
4308 4310
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user typed back, for testing prompt plumbing.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4321 4323
4322 4324
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take the working-copy lock and the store lock before warming.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4328 4330
4329 4331
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before delegating to the
    # upgrade machinery, which does all the real work.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4379 4381
4380 4382
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    files = list(repo[None].walk(matcher))
    if not files:
        return

    # Normalize path separators only when 'ui.slash' requests it on a
    # platform whose separator isn't already '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn

    relpaths = [repo.pathto(fn) for fn in files]
    # Size the columns to the longest repo-relative and cwd-relative paths.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fn) for fn in files),
        max(len(rel) for rel in relpaths),
    )
    for fn, rel in zip(files, relpaths):
        flag = b'exact' if matcher.exact(fn) else b''
        line = fmt % (fn, display(rel), flag)
        ui.write(b"%s\n" % line.rstrip())
4407 4409
4408 4410
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # When divergence is involved, list the divergent nodes (with
        # their phases) ahead of the reason.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            parts = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(parts) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4426 4428
4427 4429
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise wire protocol argument passing against a remote peer.
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options so only command arguments
        # are forwarded.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4458 4460
4459 4461
4460 4462 def _parsewirelangblocks(fh):
4461 4463 activeaction = None
4462 4464 blocklines = []
4463 4465 lastindent = 0
4464 4466
4465 4467 for line in fh:
4466 4468 line = line.rstrip()
4467 4469 if not line:
4468 4470 continue
4469 4471
4470 4472 if line.startswith(b'#'):
4471 4473 continue
4472 4474
4473 4475 if not line.startswith(b' '):
4474 4476 # New block. Flush previous one.
4475 4477 if activeaction:
4476 4478 yield activeaction, blocklines
4477 4479
4478 4480 activeaction = line
4479 4481 blocklines = []
4480 4482 lastindent = 0
4481 4483 continue
4482 4484
4483 4485 # Else we start with an indent.
4484 4486
4485 4487 if not activeaction:
4486 4488 raise error.Abort(_(b'indented line outside of block'))
4487 4489
4488 4490 indent = len(line) - len(line.lstrip())
4489 4491
4490 4492 # If this line is indented more than the last line, concatenate it.
4491 4493 if indent > lastindent and blocklines:
4492 4494 blocklines[-1] += line.lstrip()
4493 4495 else:
4494 4496 blocklines.append(line)
4495 4497 lastindent = indent
4496 4498
4497 4499 # Flush last block.
4498 4500 if activeaction:
4499 4501 yield activeaction, blocklines
4500 4502
4501 4503
4502 4504 @command(
4503 4505 b'debugwireproto',
4504 4506 [
4505 4507 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4506 4508 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4507 4509 (
4508 4510 b'',
4509 4511 b'noreadstderr',
4510 4512 False,
4511 4513 _(b'do not read from stderr of the remote'),
4512 4514 ),
4513 4515 (
4514 4516 b'',
4515 4517 b'nologhandshake',
4516 4518 False,
4517 4519 _(b'do not log I/O related to the peer handshake'),
4518 4520 ),
4519 4521 ]
4520 4522 + cmdutil.remoteopts,
4521 4523 _(b'[PATH]'),
4522 4524 optionalrepo=True,
4523 4525 )
4524 4526 def debugwireproto(ui, repo, path=None, **opts):
4525 4527 """send wire protocol commands to a server
4526 4528
4527 4529 This command can be used to issue wire protocol commands to remote
4528 4530 peers and to debug the raw data being exchanged.
4529 4531
4530 4532 ``--localssh`` will start an SSH server against the current repository
4531 4533 and connect to that. By default, the connection will perform a handshake
4532 4534 and establish an appropriate peer instance.
4533 4535
4534 4536 ``--peer`` can be used to bypass the handshake protocol and construct a
4535 4537 peer instance using the specified class type. Valid values are ``raw``,
4536 4538 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4537 4539 don't support higher-level command actions.
4538 4540
4539 4541 ``--noreadstderr`` can be used to disable automatic reading from stderr
4540 4542 of the peer (for SSH connections only). Disabling automatic reading of
4541 4543 stderr is useful for making output more deterministic.
4542 4544
4543 4545 Commands are issued via a mini language which is specified via stdin.
4544 4546 The language consists of individual actions to perform. An action is
4545 4547 defined by a block. A block is defined as a line with no leading
4546 4548 space followed by 0 or more lines with leading space. Blocks are
4547 4549 effectively a high-level command with additional metadata.
4548 4550
4549 4551 Lines beginning with ``#`` are ignored.
4550 4552
4551 4553 The following sections denote available actions.
4552 4554
4553 4555 raw
4554 4556 ---
4555 4557
4556 4558 Send raw data to the server.
4557 4559
4558 4560 The block payload contains the raw data to send as one atomic send
4559 4561 operation. The data may not actually be delivered in a single system
4560 4562 call: it depends on the abilities of the transport being used.
4561 4563
4562 4564 Each line in the block is de-indented and concatenated. Then, that
4563 4565 value is evaluated as a Python b'' literal. This allows the use of
4564 4566 backslash escaping, etc.
4565 4567
4566 4568 raw+
4567 4569 ----
4568 4570
4569 4571 Behaves like ``raw`` except flushes output afterwards.
4570 4572
4571 4573 command <X>
4572 4574 -----------
4573 4575
4574 4576 Send a request to run a named command, whose name follows the ``command``
4575 4577 string.
4576 4578
4577 4579 Arguments to the command are defined as lines in this block. The format of
4578 4580 each line is ``<key> <value>``. e.g.::
4579 4581
4580 4582 command listkeys
4581 4583 namespace bookmarks
4582 4584
4583 4585 If the value begins with ``eval:``, it will be interpreted as a Python
4584 4586 literal expression. Otherwise values are interpreted as Python b'' literals.
4585 4587 This allows sending complex types and encoding special byte sequences via
4586 4588 backslash escaping.
4587 4589
4588 4590 The following arguments have special meaning:
4589 4591
4590 4592 ``PUSHFILE``
4591 4593 When defined, the *push* mechanism of the peer will be used instead
4592 4594 of the static request-response mechanism and the content of the
4593 4595 file specified in the value of this argument will be sent as the
4594 4596 command payload.
4595 4597
4596 4598 This can be used to submit a local bundle file to the remote.
4597 4599
4598 4600 batchbegin
4599 4601 ----------
4600 4602
4601 4603 Instruct the peer to begin a batched send.
4602 4604
4603 4605 All ``command`` blocks are queued for execution until the next
4604 4606 ``batchsubmit`` block.
4605 4607
4606 4608 batchsubmit
4607 4609 -----------
4608 4610
4609 4611 Submit previously queued ``command`` blocks as a batch request.
4610 4612
4611 4613 This action MUST be paired with a ``batchbegin`` action.
4612 4614
4613 4615 httprequest <method> <path>
4614 4616 ---------------------------
4615 4617
4616 4618 (HTTP peer only)
4617 4619
4618 4620 Send an HTTP request to the peer.
4619 4621
4620 4622 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4621 4623
4622 4624 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4623 4625 headers to add to the request. e.g. ``Accept: foo``.
4624 4626
4625 4627 The following arguments are special:
4626 4628
4627 4629 ``BODYFILE``
4628 4630 The content of the file defined as the value to this argument will be
4629 4631 transferred verbatim as the HTTP request body.
4630 4632
4631 4633 ``frame <type> <flags> <payload>``
4632 4634 Send a unified protocol frame as part of the request body.
4633 4635
4634 4636 All frames will be collected and sent as the body to the HTTP
4635 4637 request.
4636 4638
4637 4639 close
4638 4640 -----
4639 4641
4640 4642 Close the connection to the server.
4641 4643
4642 4644 flush
4643 4645 -----
4644 4646
4645 4647 Flush data written to the server.
4646 4648
4647 4649 readavailable
4648 4650 -------------
4649 4651
4650 4652 Close the write end of the connection and read all available data from
4651 4653 the server.
4652 4654
4653 4655 If the connection to the server encompasses multiple pipes, we poll both
4654 4656 pipes and read available data.
4655 4657
4656 4658 readline
4657 4659 --------
4658 4660
4659 4661 Read a line of output from the server. If there are multiple output
4660 4662 pipes, reads only the main pipe.
4661 4663
4662 4664 ereadline
4663 4665 ---------
4664 4666
4665 4667 Like ``readline``, but read from the stderr pipe, if available.
4666 4668
4667 4669 read <X>
4668 4670 --------
4669 4671
4670 4672 ``read()`` N bytes from the server's main output pipe.
4671 4673
4672 4674 eread <X>
4673 4675 ---------
4674 4676
4675 4677 ``read()`` N bytes from the server's stderr pipe, if available.
4676 4678
4677 4679 Specifying Unified Frame-Based Protocol Frames
4678 4680 ----------------------------------------------
4679 4681
4680 4682 It is possible to emit a *Unified Frame-Based Protocol* by using special
4681 4683 syntax.
4682 4684
4683 4685 A frame is composed as a type, flags, and payload. These can be parsed
4684 4686 from a string of the form:
4685 4687
4686 4688 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4687 4689
4688 4690 ``request-id`` and ``stream-id`` are integers defining the request and
4689 4691 stream identifiers.
4690 4692
4691 4693 ``type`` can be an integer value for the frame type or the string name
4692 4694 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4693 4695 ``command-name``.
4694 4696
4695 4697 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4696 4698 components. Each component (and there can be just one) can be an integer
4697 4699 or a flag name for stream flags or frame flags, respectively. Values are
4698 4700 resolved to integers and then bitwise OR'd together.
4699 4701
4700 4702 ``payload`` represents the raw frame payload. If it begins with
4701 4703 ``cbor:``, the following string is evaluated as Python code and the
4702 4704 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4703 4705 as a Python byte string literal.
4704 4706 """
4705 4707 opts = pycompat.byteskwargs(opts)
4706 4708
4707 4709 if opts[b'localssh'] and not repo:
4708 4710 raise error.Abort(_(b'--localssh requires a repository'))
4709 4711
4710 4712 if opts[b'peer'] and opts[b'peer'] not in (
4711 4713 b'raw',
4712 4714 b'ssh1',
4713 4715 ):
4714 4716 raise error.Abort(
4715 4717 _(b'invalid value for --peer'),
4716 4718 hint=_(b'valid values are "raw" and "ssh1"'),
4717 4719 )
4718 4720
4719 4721 if path and opts[b'localssh']:
4720 4722 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4721 4723
4722 4724 if ui.interactive():
4723 4725 ui.write(_(b'(waiting for commands on stdin)\n'))
4724 4726
4725 4727 blocks = list(_parsewirelangblocks(ui.fin))
4726 4728
4727 4729 proc = None
4728 4730 stdin = None
4729 4731 stdout = None
4730 4732 stderr = None
4731 4733 opener = None
4732 4734
4733 4735 if opts[b'localssh']:
4734 4736 # We start the SSH server in its own process so there is process
4735 4737 # separation. This prevents a whole class of potential bugs around
4736 4738 # shared state from interfering with server operation.
4737 4739 args = procutil.hgcmd() + [
4738 4740 b'-R',
4739 4741 repo.root,
4740 4742 b'debugserve',
4741 4743 b'--sshstdio',
4742 4744 ]
4743 4745 proc = subprocess.Popen(
4744 4746 pycompat.rapply(procutil.tonativestr, args),
4745 4747 stdin=subprocess.PIPE,
4746 4748 stdout=subprocess.PIPE,
4747 4749 stderr=subprocess.PIPE,
4748 4750 bufsize=0,
4749 4751 )
4750 4752
4751 4753 stdin = proc.stdin
4752 4754 stdout = proc.stdout
4753 4755 stderr = proc.stderr
4754 4756
4755 4757 # We turn the pipes into observers so we can log I/O.
4756 4758 if ui.verbose or opts[b'peer'] == b'raw':
4757 4759 stdin = util.makeloggingfileobject(
4758 4760 ui, proc.stdin, b'i', logdata=True
4759 4761 )
4760 4762 stdout = util.makeloggingfileobject(
4761 4763 ui, proc.stdout, b'o', logdata=True
4762 4764 )
4763 4765 stderr = util.makeloggingfileobject(
4764 4766 ui, proc.stderr, b'e', logdata=True
4765 4767 )
4766 4768
4767 4769 # --localssh also implies the peer connection settings.
4768 4770
4769 4771 url = b'ssh://localserver'
4770 4772 autoreadstderr = not opts[b'noreadstderr']
4771 4773
4772 4774 if opts[b'peer'] == b'ssh1':
4773 4775 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4774 4776 peer = sshpeer.sshv1peer(
4775 4777 ui,
4776 4778 url,
4777 4779 proc,
4778 4780 stdin,
4779 4781 stdout,
4780 4782 stderr,
4781 4783 None,
4782 4784 autoreadstderr=autoreadstderr,
4783 4785 )
4784 4786 elif opts[b'peer'] == b'raw':
4785 4787 ui.write(_(b'using raw connection to peer\n'))
4786 4788 peer = None
4787 4789 else:
4788 4790 ui.write(_(b'creating ssh peer from handshake results\n'))
4789 4791 peer = sshpeer.makepeer(
4790 4792 ui,
4791 4793 url,
4792 4794 proc,
4793 4795 stdin,
4794 4796 stdout,
4795 4797 stderr,
4796 4798 autoreadstderr=autoreadstderr,
4797 4799 )
4798 4800
4799 4801 elif path:
4800 4802 # We bypass hg.peer() so we can proxy the sockets.
4801 4803 # TODO consider not doing this because we skip
4802 4804 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4803 4805 u = urlutil.url(path)
4804 4806 if u.scheme != b'http':
4805 4807 raise error.Abort(_(b'only http:// paths are currently supported'))
4806 4808
4807 4809 url, authinfo = u.authinfo()
4808 4810 openerargs = {
4809 4811 'useragent': b'Mercurial debugwireproto',
4810 4812 }
4811 4813
4812 4814 # Turn pipes/sockets into observers so we can log I/O.
4813 4815 if ui.verbose:
4814 4816 openerargs.update(
4815 4817 {
4816 4818 'loggingfh': ui,
4817 4819 'loggingname': b's',
4818 4820 'loggingopts': {
4819 4821 'logdata': True,
4820 4822 'logdataapis': False,
4821 4823 },
4822 4824 }
4823 4825 )
4824 4826
4825 4827 if ui.debugflag:
4826 4828 openerargs['loggingopts']['logdataapis'] = True
4827 4829
4828 4830 # Don't send default headers when in raw mode. This allows us to
4829 4831 # bypass most of the behavior of our URL handling code so we can
4830 4832 # have near complete control over what's sent on the wire.
4831 4833 if opts[b'peer'] == b'raw':
4832 4834 openerargs['sendaccept'] = False
4833 4835
4834 4836 opener = urlmod.opener(ui, authinfo, **openerargs)
4835 4837
4836 4838 if opts[b'peer'] == b'raw':
4837 4839 ui.write(_(b'using raw connection to peer\n'))
4838 4840 peer = None
4839 4841 elif opts[b'peer']:
4840 4842 raise error.Abort(
4841 4843 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4842 4844 )
4843 4845 else:
4844 4846 peer = httppeer.makepeer(ui, path, opener=opener)
4845 4847
4846 4848 # We /could/ populate stdin/stdout with sock.makefile()...
4847 4849 else:
4848 4850 raise error.Abort(_(b'unsupported connection configuration'))
4849 4851
4850 4852 batchedcommands = None
4851 4853
4852 4854 # Now perform actions based on the parsed wire language instructions.
4853 4855 for action, lines in blocks:
4854 4856 if action in (b'raw', b'raw+'):
4855 4857 if not stdin:
4856 4858 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4857 4859
4858 4860 # Concatenate the data together.
4859 4861 data = b''.join(l.lstrip() for l in lines)
4860 4862 data = stringutil.unescapestr(data)
4861 4863 stdin.write(data)
4862 4864
4863 4865 if action == b'raw+':
4864 4866 stdin.flush()
4865 4867 elif action == b'flush':
4866 4868 if not stdin:
4867 4869 raise error.Abort(_(b'cannot call flush on this peer'))
4868 4870 stdin.flush()
4869 4871 elif action.startswith(b'command'):
4870 4872 if not peer:
4871 4873 raise error.Abort(
4872 4874 _(
4873 4875 b'cannot send commands unless peer instance '
4874 4876 b'is available'
4875 4877 )
4876 4878 )
4877 4879
4878 4880 command = action.split(b' ', 1)[1]
4879 4881
4880 4882 args = {}
4881 4883 for line in lines:
4882 4884 # We need to allow empty values.
4883 4885 fields = line.lstrip().split(b' ', 1)
4884 4886 if len(fields) == 1:
4885 4887 key = fields[0]
4886 4888 value = b''
4887 4889 else:
4888 4890 key, value = fields
4889 4891
4890 4892 if value.startswith(b'eval:'):
4891 4893 value = stringutil.evalpythonliteral(value[5:])
4892 4894 else:
4893 4895 value = stringutil.unescapestr(value)
4894 4896
4895 4897 args[key] = value
4896 4898
4897 4899 if batchedcommands is not None:
4898 4900 batchedcommands.append((command, args))
4899 4901 continue
4900 4902
4901 4903 ui.status(_(b'sending %s command\n') % command)
4902 4904
4903 4905 if b'PUSHFILE' in args:
4904 4906 with open(args[b'PUSHFILE'], 'rb') as fh:
4905 4907 del args[b'PUSHFILE']
4906 4908 res, output = peer._callpush(
4907 4909 command, fh, **pycompat.strkwargs(args)
4908 4910 )
4909 4911 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4910 4912 ui.status(
4911 4913 _(b'remote output: %s\n') % stringutil.escapestr(output)
4912 4914 )
4913 4915 else:
4914 4916 with peer.commandexecutor() as e:
4915 4917 res = e.callcommand(command, args).result()
4916 4918
4917 4919 ui.status(
4918 4920 _(b'response: %s\n')
4919 4921 % stringutil.pprint(res, bprefix=True, indent=2)
4920 4922 )
4921 4923
4922 4924 elif action == b'batchbegin':
4923 4925 if batchedcommands is not None:
4924 4926 raise error.Abort(_(b'nested batchbegin not allowed'))
4925 4927
4926 4928 batchedcommands = []
4927 4929 elif action == b'batchsubmit':
4928 4930 # There is a batching API we could go through. But it would be
4929 4931 # difficult to normalize requests into function calls. It is easier
4930 4932 # to bypass this layer and normalize to commands + args.
4931 4933 ui.status(
4932 4934 _(b'sending batch with %d sub-commands\n')
4933 4935 % len(batchedcommands)
4934 4936 )
4935 4937 assert peer is not None
4936 4938 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4937 4939 ui.status(
4938 4940 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4939 4941 )
4940 4942
4941 4943 batchedcommands = None
4942 4944
4943 4945 elif action.startswith(b'httprequest '):
4944 4946 if not opener:
4945 4947 raise error.Abort(
4946 4948 _(b'cannot use httprequest without an HTTP peer')
4947 4949 )
4948 4950
4949 4951 request = action.split(b' ', 2)
4950 4952 if len(request) != 3:
4951 4953 raise error.Abort(
4952 4954 _(
4953 4955 b'invalid httprequest: expected format is '
4954 4956 b'"httprequest <method> <path>'
4955 4957 )
4956 4958 )
4957 4959
4958 4960 method, httppath = request[1:]
4959 4961 headers = {}
4960 4962 body = None
4961 4963 frames = []
4962 4964 for line in lines:
4963 4965 line = line.lstrip()
4964 4966 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4965 4967 if m:
4966 4968 # Headers need to use native strings.
4967 4969 key = pycompat.strurl(m.group(1))
4968 4970 value = pycompat.strurl(m.group(2))
4969 4971 headers[key] = value
4970 4972 continue
4971 4973
4972 4974 if line.startswith(b'BODYFILE '):
4973 4975 with open(line.split(b' ', 1), b'rb') as fh:
4974 4976 body = fh.read()
4975 4977 elif line.startswith(b'frame '):
4976 4978 frame = wireprotoframing.makeframefromhumanstring(
4977 4979 line[len(b'frame ') :]
4978 4980 )
4979 4981
4980 4982 frames.append(frame)
4981 4983 else:
4982 4984 raise error.Abort(
4983 4985 _(b'unknown argument to httprequest: %s') % line
4984 4986 )
4985 4987
4986 4988 url = path + httppath
4987 4989
4988 4990 if frames:
4989 4991 body = b''.join(bytes(f) for f in frames)
4990 4992
4991 4993 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4992 4994
4993 4995 # urllib.Request insists on using has_data() as a proxy for
4994 4996 # determining the request method. Override that to use our
4995 4997 # explicitly requested method.
4996 4998 req.get_method = lambda: pycompat.sysstr(method)
4997 4999
4998 5000 try:
4999 5001 res = opener.open(req)
5000 5002 body = res.read()
5001 5003 except util.urlerr.urlerror as e:
5002 5004 # read() method must be called, but only exists in Python 2
5003 5005 getattr(e, 'read', lambda: None)()
5004 5006 continue
5005 5007
5006 5008 ct = res.headers.get('Content-Type')
5007 5009 if ct == 'application/mercurial-cbor':
5008 5010 ui.write(
5009 5011 _(b'cbor> %s\n')
5010 5012 % stringutil.pprint(
5011 5013 cborutil.decodeall(body), bprefix=True, indent=2
5012 5014 )
5013 5015 )
5014 5016
5015 5017 elif action == b'close':
5016 5018 assert peer is not None
5017 5019 peer.close()
5018 5020 elif action == b'readavailable':
5019 5021 if not stdout or not stderr:
5020 5022 raise error.Abort(
5021 5023 _(b'readavailable not available on this peer')
5022 5024 )
5023 5025
5024 5026 stdin.close()
5025 5027 stdout.read()
5026 5028 stderr.read()
5027 5029
5028 5030 elif action == b'readline':
5029 5031 if not stdout:
5030 5032 raise error.Abort(_(b'readline not available on this peer'))
5031 5033 stdout.readline()
5032 5034 elif action == b'ereadline':
5033 5035 if not stderr:
5034 5036 raise error.Abort(_(b'ereadline not available on this peer'))
5035 5037 stderr.readline()
5036 5038 elif action.startswith(b'read '):
5037 5039 count = int(action.split(b' ', 1)[1])
5038 5040 if not stdout:
5039 5041 raise error.Abort(_(b'read not available on this peer'))
5040 5042 stdout.read(count)
5041 5043 elif action.startswith(b'eread '):
5042 5044 count = int(action.split(b' ', 1)[1])
5043 5045 if not stderr:
5044 5046 raise error.Abort(_(b'eread not available on this peer'))
5045 5047 stderr.read(count)
5046 5048 else:
5047 5049 raise error.Abort(_(b'unknown action: %s') % action)
5048 5050
5049 5051 if batchedcommands is not None:
5050 5052 raise error.Abort(_(b'unclosed "batchbegin" request'))
5051 5053
5052 5054 if peer:
5053 5055 peer.close()
5054 5056
5055 5057 if proc:
5056 5058 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now