##// END OF EJS Templates
debug-discovery: do not abort on unrelated repositories...
marmoute -
r50298:ac4fda5d stable
parent child Browse files
Show More
@@ -1,5051 +1,5056 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revlogutils,
77 77 revset,
78 78 revsetlang,
79 79 scmutil,
80 80 setdiscovery,
81 81 simplemerge,
82 82 sshpeer,
83 83 sslutil,
84 84 streamclone,
85 85 strip,
86 86 tags as tagsmod,
87 87 templater,
88 88 treediscovery,
89 89 upgrade,
90 90 url as urlmod,
91 91 util,
92 92 vfs as vfsmod,
93 93 wireprotoframing,
94 94 wireprotoserver,
95 95 )
96 96 from .interfaces import repository
97 97 from .utils import (
98 98 cborutil,
99 99 compression,
100 100 dateutil,
101 101 procutil,
102 102 stringutil,
103 103 urlutil,
104 104 )
105 105
106 106 from .revlogutils import (
107 107 constants as revlog_constants,
108 108 debug as revlog_debug,
109 109 deltas as deltautil,
110 110 nodemap,
111 111 rewrite,
112 112 sidedata,
113 113 )
114 114
115 115 release = lockmod.release
116 116
117 117 table = {}
118 118 table.update(strip.command._table)
119 119 command = registrar.command(table)
120 120
121 121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 2:
        # No explicit index given: use the current repository's changelog.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    elif len(args) == 3:
        # An explicit revlog index file was supplied on the command line.
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(ancestor), hex(ancestor)))
141 141
142 142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # This is a base85-armored version of the EICAR test file. See
    # https://en.wikipedia.org/wiki/EICAR_test_file for details.
    eicar = util.b85decode(
        b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
        b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
    )
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(eicar)
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
158 158
159 159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # hg.openpath handles both local paths and URLs.
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    # Replay the stream clone contents onto the current repository.
    gen.apply(repo)
166 166
167 167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass; only counts nodes)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second parse pass actually creates the commits, under locks and a
    # single transaction.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' element: create one commit
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # maintain the shared "mf" file, merging parent copies
                    # with simplemerge on merge commits
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag the line belonging to this rev so merges conflict
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every revision
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # one brand-new file per revision; merges also carry
                    # over the "nf*" files from the second parent
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' element: record a local tag for an earlier node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' element: switch the named branch for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
351 351
352 352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup unbundler `gen`

    With `all`, prints every delta of the changelog, manifest and filelog
    sections; otherwise only lists the changelog node ids. `indent` prefixes
    each output line with that many spaces (used when nested in bundle2
    output). Consumes the changegroup stream.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print a section header, then one line per delta in the section
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelog sections repeat until filelogheader() returns an empty dict
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
392 392
393 393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # unknown marker encoding: report it instead of aborting, so the
        # rest of the bundle can still be inspected
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
416 416
417 417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in binary phase-heads 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
426 426
427 427
def _quasirepr(thing):
    """Return a repr-like bytes form with deterministic (sorted) dict order."""
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return b'{%s}' % b', '.join(pairs)
434 434
435 435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # --part-type restricts the output to the listed part types
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known part payloads get a detailed, indented dump unless --quiet
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458 458
459 459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only report the bundlespec, do not dump contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482 482
483 483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        # bundle2 capabilities are nested: one key, possibly many values
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # always release the peer connection, even on error
        peer.close()
503 503
504 504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    if opts['compute']:
        # recompute from the changeset instead of trusting stored sidedata
        files = metadata.compute_all_files_changes(ctx)
    else:
        files = None
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            files = metadata.decode_files_sidedata(sd)

    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        # note which parent (if any) the file was copied from
        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
554 554
555 555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errcount = 0
    # verify() yields (format-string, arg, ...) tuples, one per problem
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
569 569
570 570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if not opts.get('style'):
        return _debugdisplaycolor(ui)
    return _debugdisplaystyle(ui)
583 583
584 584
def _debugdisplaycolor(ui):
    """print every known color/effect name, each rendered with itself"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode also exposes user-configured color.*/terminfo.* names
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.'):]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601 601
602 602
def _debugdisplaystyle(ui):
    """print each configured style label together with its effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect lists line up in a column
    width = max(len(name) for name in ui._styles)
    for label in sorted(ui._styles):
        effects = ui._styles[label]
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616 616
617 617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.

    Writes the bundle to ``fname`` and reports the requirements a reader
    needs in order to consume it.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # NOTE: named `reqs` to avoid shadowing the module-level `requirements`
    # import from the surrounding file.
    reqs, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
639 639
640 640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog index file: emit its DAG, labeling requested revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield 'n' (node) events, plus 'l' (label) events for the
            # revisions listed on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no file given: emit the current repository's changelog DAG
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged revision to its list of tag names
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (branch annotation) event on branch change
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
710 710
711 711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    storage_flag = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if storage_flag:
        # with -c/-m/--dir the first positional argument is really the REV
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 727
728 728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747 747
748 748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta)
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta)
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # gather per-revision delta statistics from the revlog index entry
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # classify the delta by what its base revision is
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without generaldelta the base is either the rev itself or prev
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev      p1      p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # number chains consecutively by first-seen base
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length one: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate the sparse read and measure how much data it touches
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
989 989
990 990
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # one positional argument means REV only; two mean FILE REV
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)

    # debug_search makes the delta computer narrate every candidate it tries
    deltacomputer = deltautil.deltacomputer(
        revlog,
        write_debug=ui.write,
        debug_search=True,
    )

    # rebuild the revisioninfo the delta computer would have been given when
    # the revision was originally stored
    node = revlog.node(rev)
    p1r, p2r = revlog.parentrevs(rev)
    p1 = revlog.node(p1r)
    p2 = revlog.node(p2r)
    btext = [revlog.revision(rev)]
    textlen = len(btext[0])
    cachedelta = None
    flags = revlog.flags(rev)

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    fh = revlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1048 1048
1049 1049
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead)'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket dumps the dirstate-v2 metadata file instead of the entries
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) forces dates off even when --dates is set
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) when --datesort is requested
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # symlinks render as 'lnk'; regular entries as octal permissions
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1137 1137
1138 1138
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # only dirstate-v2 dockets record an ignore pattern hash; the command
    # intentionally prints nothing on dirstate-v1 repositories (the original
    # docstring said "v2" here, which contradicted the code below)
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1153 1153
1154 1154
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situation.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual (possibly local-path) peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        branches = (None, [])
        # simulate the remote side with a filtered view of the local repo:
        # every revision that is not an ancestor of --remote-as-revs is hidden
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # same filtering trick for the local side (--local-as-revs)
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` collects audit statistics filled in by the discovery code
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # NOTE: `any` shadows the builtin here; kept unchanged.
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                # keep going even for unrelated repositories so the
                # discovery statistics can still be reported
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:

        @contextlib.contextmanager
        def may_capture_output():
            # strict formatters (e.g. json) must not see free-form output;
            # buffer it and expose it as a regular data field instead
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1429 1434
1430 1435
_chunksize = 4 << 10  # 4 KiB copy-buffer size used by debugdownload


@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched with the same url handling Mercurial itself
    would use, then written to the --output path, or echoed through the ui
    when no output path is given.
    """
    fh = urlmod.open(ui, url, output)

    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # the original leaked the source handle on every call; close it even
        # when opening the destination file or copying fails
        fh.close()
1456 1461
1457 1462
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; frozen (oxidized) builds have no
        # __file__, so fall back to the executable path
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with the extension's compatibility status
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1519 1524
1520 1525
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the expression goes through these stages in order; each one can be
    # dumped individually with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1616 1621
1617 1622
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # building a report (--to-report) cannot be combined with replaying one
    # (--from-report) or with --dry-run
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the actual detection/repair logic lives in the `rewrite` module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1690 1695
1691 1696
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: widest variant name, at least the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded so all value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain mode: strings pass through, booleans render as yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo / config / default get a
        # distinct color in the output
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1762 1767
1763 1768
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result the way this command always has
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probe case sensitivity with a throw-away temp file in `path`
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        # probing can fail (e.g. read-only location); report unknown
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1786 1791
1787 1792
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # translate the hex node arguments into the wire call's keyword args
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing compression name onto the on-disk bundle header
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1834 1839
1835 1840
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # the file itself may match a rule, or one of its parent
                # directories may be what is actually ignored
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1884 1889
1885 1890
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    storage = cmdutil.openstorage(repo, b'debugindex', file_, opts)
    fmt = ui.formatter(b'debugindex', opts)
    # The storage object may wrap a raw revlog; unwrap it when possible.
    # Note: this `getattr` is pycompat.getattr (see module imports), which
    # accepts a bytes attribute name.
    rl = getattr(storage, b'_revlog', storage)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fmt,
        revlog=rl,
        full_node=ui.debugflag,
    )
1908 1913
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rl = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in rl:
        p1, p2 = rl.parents(rl.node(rev))
        ui.write(b"\t%d -> %d\n" % (rl.rev(p1), rev))
        # NOTE(review): with optionalrepo=True and a bare FILE argument,
        # `repo` may be None here, which would make repo.nullid fail —
        # confirm whether a repo-less invocation is expected to reach this.
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (rl.rev(p2), rev))
    ui.write(b"}\n")
1928 1933
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Exercise the index once so that the native implementation has
    # populated its internal statistics.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1939 1944
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Checks encoding, the Python interpreter, security (TLS/SNI) support,
    compiled extension modules, compression engines, templates, the commit
    editor and the configured username, writing one line per check through
    the formatter.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # Number of problems found so far; doubles as the command's exit value.
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # PyOxidizer builds have no os.__file__; report the binary instead.
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # Try importing the compiled extensions the policy claims to use;
        # any failure here is a real installation problem.
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is reused below as the "templates are fine" flag
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the built-in fallback editor, so a missing 'vi' means the user
    # never configured an editor; phrase the error accordingly.
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2239 2244
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
2254 2259
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only so that old shell-completion scripts keep working; the
    # actual implementation lives in debugnamecomplete below.
    debugnamecomplete(ui, repo, *args)
2260 2265
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: just remove the lock files and exit.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire and hold until interrupted.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    # Default mode: report the current state of both locks.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Report one lock; returns 1 if it is held, 0 if it is free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we were able to take the lock, so it was free; release it again
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # lock file vanished between the failed acquisition and the
                # stat: treat as free
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2384 2389
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache of the root manifest storage; abort if
        # the active revlog implementation has none.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # No option given: display the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2459 2464
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable rendering of the merge state.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # local/other commit being merged
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras for files that are no longer (or not) in the merge state
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2568 2573
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # 'branches' is handled separately below so that only open branches
    # are offered, as was historically the case.
    for name, ns in repo.names.items():
        if name != b'branches':
            names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)
    prefixes = args or [b'']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in names if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2592 2597
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        changelog = repo.unfiltered().changelog
        index = changelog.index
        if util.safehasattr(index, "nodemap_data_all"):
            # the native index can serialize its whole nodemap itself
            serialized = index.nodemap_data_all()
        else:
            serialized = nodemap.persistent_data(index)
        ui.write(serialized)
    elif opts['dump_disk']:
        changelog = repo.unfiltered().changelog
        on_disk = nodemap.persisted_data(changelog)
        if on_disk is not None:
            docket, data = on_disk
            ui.write(data[:])
    elif opts['check']:
        changelog = repo.unfiltered().changelog
        on_disk = nodemap.persisted_data(changelog)
        if on_disk is not None:
            docket, data = on_disk
            return nodemap.check_data(ui, changelog.index, data)
    elif opts['metadata']:
        changelog = repo.unfiltered().changelog
        on_disk = nodemap.persisted_data(changelog)
        if on_disk is not None:
            docket, data = on_disk
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2655 2660
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id, raising InputError on anything else.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete: remove markers by index and return early.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a new marker inside a transaction.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2806 2811
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dest, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2820 2825
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copymap = ctx.p2copies()
    for dest, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2834 2839
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    # NOTE: ``opts`` is used with native-str keys throughout this command
    # (no byteskwargs conversion), unlike most sibling debug commands.
    def complete(path, acceptable):
        # Return (files, dirs) completions for ``path``, restricted to
        # dirstate entries whose one-letter state is in ``acceptable``.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Specs pointing outside the repository cannot match anything.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative, like dirstate paths.
        spec = spec[len(rootdir) :]
        # The dirstate stores '/'-separated paths; on platforms with a
        # different separator we translate in both directions below.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # Bind the bound methods once; this loop runs over the whole dirstate.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path segment: matches
                # containing a further separator complete as directories.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the accepted-state filter from the flags; empty means "any".
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # Fall back to all states ('nmar') when no filter flag was given.
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2903 2908
2904 2909
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    sourcectx = scmutil.revsingle(repo, rev1)
    destctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(sourcectx, pats, opts)
    # One "source -> dest" line per detected copy, sorted by destination.
    copymap = copies.pathcopies(sourcectx, destctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2918 2923
2919 2924
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is unconditionally enabled here; the log lines
    # themselves only show up when --debug is in effect.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        # Always release the peer connection, even if a query above failed.
        peer.close()
2943 2948
2944 2949
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    forcedtool = opts[b'tool']
    if forcedtool:
        overrides[(b'ui', b'forcemerge')] = forcedtool
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(forcedtool)))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the other tool-selection inputs (verbose only).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # Without --debug, suppress the picker's chatter (including
            # stderr); with --debug let everything through unchanged.
            if ui.debugflag:
                quiet = util.nullcontextmanager()
            else:
                quiet = ui.silent(error=True)
            with quiet:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3029 3034
3030 3035
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
        else:
            # Update mode: compare-and-set KEY from OLD to NEW.
            key, old, new = keyinfo
            with target.commandexecutor() as executor:
                outcome = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(outcome) + b'\n')
            # Exit status 0 on success, 1 on refusal.
            return not outcome
    finally:
        target.close()
3066 3071
3067 3072
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display parent-vector data and the relation between two revisions

    Prints each revision's pvec, their depths, and the delta, hamming
    distance and detected relation (``=``, ``>``, ``<`` or ``|``).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive fallback: previously ``rel`` was left unbound when none
        # of the comparisons matched, which would raise UnboundLocalError
        # at the write below instead of reporting anything useful.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3094 3099
3095 3100
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does. (``opts`` intentionally
        # keeps native-str keys here; no byteskwargs conversion is done.)
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # Tracked by the manifest but unknown to the dirstate.
            manifestonly = inmanifest - indirstate
            # Known to the dirstate but absent from the manifest; keep
            # entries marked "added" untouched.
            dsnotadded = set()
            for f in indirstate - inmanifest:
                if not dirstate.get_entry(f).added:
                    dsnotadded.add(f)
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3143 3148
3144 3149
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Normalize kwargs to bytes keys before reading the flag.
    byteopts = pycompat.byteskwargs(opts)
    onlydata = byteopts.get(b"only_data")
    repair.rebuildfncache(ui, repo, onlydata)
3161 3166
3162 3167
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a false value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            src, srcnode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n") % (relpath, src, hex(srcnode))
            )
3182 3187
3183 3188
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3189 3194
3190 3195
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: emit one raw row per revision and return without
        # computing any of the aggregate statistics below.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start   end deltastart base   p1   p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in range(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Running head count: a revision stops being a head as soon as
            # a later revision names it as a parent.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # ts is the cumulative raw size; r.end(rev) the stored size
                # so far, hence a running compression ratio.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    # each of these is a [min, max, total] accumulator fed by addsize()
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold ``size`` into the [min, max, total] accumulator ``l``.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in range(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # No delta base: full snapshot (or empty text).
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            # chain "span": distance from the chain base to the end of
            # this revision's data on disk.
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Plain delta: classify by which revision it is against.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the stored chunk identifies its compression.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # From here on, slot [2] of each accumulator is repurposed as the mean.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Integer column sized to the widest expected value.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # Integer column plus a "(xx.xx%)" percentage suffix.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags  : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions     : ' + fmt2 % numrevs)
    ui.writenoi18n(b'    merges    : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b'    normal    : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions     : ' + fmt2 % numrevs)
    ui.writenoi18n(b'    empty     : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b'                   text  : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b'                   delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b'    snapshot  : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b'      lvl-%-3d :       ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b'    snapshot  : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b'      lvl-%-3d :       ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == b'empty':
            return b'       %s     : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b'       0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return b'       0x%s      : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks        : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size   : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length  : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length  : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach   : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg)     : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg)    : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b'    level-%-3d (min/max/avg)          : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg)             : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev  : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b'    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b'    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b'    other            : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3545 3550
3546 3551
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Two layout variants are supported; anything else is rejected early.
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full hashes, otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Measure one rendered node id to size the id columns.
        idlen = len(shortfn(r.node(i)))
        break

    # Header row; columns differ per format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the lookup fails so the dump
                # can still proceed past a damaged entry.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not node ids.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3660 3665
3661 3666
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The parsing pipeline: each stage transforms the tree produced by the
    # previous one; order matters.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Which stages to print: always, or only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff the
        # resulting revision lists; any difference is an optimizer bug.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3793 3798
3794 3799
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        fd = int(opts[b'logiofd'])
        # Line buffering would be ideal, but binary-mode line buffering
        # isn't supported and warns on Python 3.8+. Unbuffered is fine:
        # this is not performance critical code.
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # A pipe can't seek, so append mode fails on py3; fall back
            # to plain write mode.
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3843 3848
3844 3849
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions to nodes; a missing REV2 means the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the dirstate parent fields are rewritten; no working copy update.
    with repo.wlock():
        repo.setparents(node1, node2)
3872 3877
3873 3878
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir there is no FILE argument: the single positional
    # argument is the revision itself.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fixed: the usage errors previously reported b'debugdata'
            # (copy-pasted from that command) instead of this command's name.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap storage wrappers down to the underlying revlog when present.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Print entries sorted by key; values only dumped with --verbose.
        sidedata = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3900 3905
3901 3906
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only https and ssh URLs make sense here; default the port per scheme.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # build an explicit SSLContext instead. Verification is deliberately
    # disabled: the goal is to *fetch* the peer's certificate so Windows
    # can complete the chain, not to validate it.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # DER-encoded certificate of the peer.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call with build=True asks Windows Update to fetch
            # the missing intermediates/root.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3973 3978
3974 3979
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip backup bundle, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # usual log options (--newest-first, --no-merges).
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do when the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            # Open the bundle file as a peer repository.
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Compute the changesets in the bundle not already in the local repo.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # --recover: apply the first bundle containing the node.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: header is the bundle's modification time.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always remove the temporary bundle repo state.
            cleanupfn()
4115 4120
4116 4121
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepo state (path, source, revision) recorded in a changeset.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4128 4133
4129 4134
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose ui and repo in the interpreter's local namespace.
    code.interact(local={'ui': ui, 'repo': repo})
4145 4150
4146 4151
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Rendering helpers: revision header and short node form.
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # Each successors set is printed space-separated on its own
            # (indented) line; an empty set still emits a blank line.
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4201 4206
4202 4207
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        tagsnode = cache.getfnode(node, computemissing=False)
        # Render the cached .hgtags filenode: hex when cached (flagging
        # nodes unknown to the filelog), 'missing' when not cached at all,
        # 'invalid' for any other (falsy) cache content.
        if tagsnode is None:
            display = b'missing'
        elif not tagsnode:
            display = b'invalid'
        else:
            display = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                display += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (r, hex(node), display))
4221 4226
4222 4227
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Log-template mode requires a repository to resolve revisions.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties.
    # 'ui' is reserved and an empty key is rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic-template mode: render once with the -D properties only.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log-template mode: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4286 4291
4287 4292
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may yield None; show a placeholder in that case.
    response = b"<default response>" if response is None else response
    ui.writenoi18n(b'response: %s\n' % response)
4302 4307
4303 4308
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user answered to the prompt.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4316 4321
4317 4322
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both locks so every cache may be rewritten safely.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4323 4328
4324 4329
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # -o/--optimize may be repeated; deduplicate before delegating the
    # actual work to the upgrade module.
    requested = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=requested, backup=backup, **opts
    )
4374 4379
4375 4380
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    matched = list(repo[None].walk(matcher))
    if not matched:
        return
    # Optionally normalize path separators for display (ui.slash).
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Size the columns to the longest repo-relative and cwd-relative paths
    # so the output lines up.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in matched),
        max(len(repo.pathto(path)) for path in matched),
    )
    for path in matched:
        line = fmt % (
            path,
            display(repo.pathto(path)),
            b'exact' if matcher.exact(path) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4402 4407
4403 4408
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Divergent nodes, when present, are listed with their phase and
        # followed by a trailing separator space.
        if entry.get(b'divergentnodes'):
            formatted = [
                b'%s (%s)' % (c.hex(), c.phasestr())
                for c in entry[b'divergentnodes']
            ]
            dnodes = b' '.join(formatted) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4421 4426
4422 4427
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only command options remain.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Forward only the options that were actually set.
        args = {k: v for k, v in opts.items() if v}
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = peer.debugwireargs(*vals, **args)
        res2 = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        peer.close()
4453 4458
4454 4459
4455 4460 def _parsewirelangblocks(fh):
4456 4461 activeaction = None
4457 4462 blocklines = []
4458 4463 lastindent = 0
4459 4464
4460 4465 for line in fh:
4461 4466 line = line.rstrip()
4462 4467 if not line:
4463 4468 continue
4464 4469
4465 4470 if line.startswith(b'#'):
4466 4471 continue
4467 4472
4468 4473 if not line.startswith(b' '):
4469 4474 # New block. Flush previous one.
4470 4475 if activeaction:
4471 4476 yield activeaction, blocklines
4472 4477
4473 4478 activeaction = line
4474 4479 blocklines = []
4475 4480 lastindent = 0
4476 4481 continue
4477 4482
4478 4483 # Else we start with an indent.
4479 4484
4480 4485 if not activeaction:
4481 4486 raise error.Abort(_(b'indented line outside of block'))
4482 4487
4483 4488 indent = len(line) - len(line.lstrip())
4484 4489
4485 4490 # If this line is indented more than the last line, concatenate it.
4486 4491 if indent > lastindent and blocklines:
4487 4492 blocklines[-1] += line.lstrip()
4488 4493 else:
4489 4494 blocklines.append(line)
4490 4495 lastindent = indent
4491 4496
4492 4497 # Flush last block.
4493 4498 if activeaction:
4494 4499 yield activeaction, blocklines
4495 4500
4496 4501
4497 4502 @command(
4498 4503 b'debugwireproto',
4499 4504 [
4500 4505 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4501 4506 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4502 4507 (
4503 4508 b'',
4504 4509 b'noreadstderr',
4505 4510 False,
4506 4511 _(b'do not read from stderr of the remote'),
4507 4512 ),
4508 4513 (
4509 4514 b'',
4510 4515 b'nologhandshake',
4511 4516 False,
4512 4517 _(b'do not log I/O related to the peer handshake'),
4513 4518 ),
4514 4519 ]
4515 4520 + cmdutil.remoteopts,
4516 4521 _(b'[PATH]'),
4517 4522 optionalrepo=True,
4518 4523 )
4519 4524 def debugwireproto(ui, repo, path=None, **opts):
4520 4525 """send wire protocol commands to a server
4521 4526
4522 4527 This command can be used to issue wire protocol commands to remote
4523 4528 peers and to debug the raw data being exchanged.
4524 4529
4525 4530 ``--localssh`` will start an SSH server against the current repository
4526 4531 and connect to that. By default, the connection will perform a handshake
4527 4532 and establish an appropriate peer instance.
4528 4533
4529 4534 ``--peer`` can be used to bypass the handshake protocol and construct a
4530 4535 peer instance using the specified class type. Valid values are ``raw``,
4531 4536 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4532 4537 don't support higher-level command actions.
4533 4538
4534 4539 ``--noreadstderr`` can be used to disable automatic reading from stderr
4535 4540 of the peer (for SSH connections only). Disabling automatic reading of
4536 4541 stderr is useful for making output more deterministic.
4537 4542
4538 4543 Commands are issued via a mini language which is specified via stdin.
4539 4544 The language consists of individual actions to perform. An action is
4540 4545 defined by a block. A block is defined as a line with no leading
4541 4546 space followed by 0 or more lines with leading space. Blocks are
4542 4547 effectively a high-level command with additional metadata.
4543 4548
4544 4549 Lines beginning with ``#`` are ignored.
4545 4550
4546 4551 The following sections denote available actions.
4547 4552
4548 4553 raw
4549 4554 ---
4550 4555
4551 4556 Send raw data to the server.
4552 4557
4553 4558 The block payload contains the raw data to send as one atomic send
4554 4559 operation. The data may not actually be delivered in a single system
4555 4560 call: it depends on the abilities of the transport being used.
4556 4561
4557 4562 Each line in the block is de-indented and concatenated. Then, that
4558 4563 value is evaluated as a Python b'' literal. This allows the use of
4559 4564 backslash escaping, etc.
4560 4565
4561 4566 raw+
4562 4567 ----
4563 4568
4564 4569 Behaves like ``raw`` except flushes output afterwards.
4565 4570
4566 4571 command <X>
4567 4572 -----------
4568 4573
4569 4574 Send a request to run a named command, whose name follows the ``command``
4570 4575 string.
4571 4576
4572 4577 Arguments to the command are defined as lines in this block. The format of
4573 4578 each line is ``<key> <value>``. e.g.::
4574 4579
4575 4580 command listkeys
4576 4581 namespace bookmarks
4577 4582
4578 4583 If the value begins with ``eval:``, it will be interpreted as a Python
4579 4584 literal expression. Otherwise values are interpreted as Python b'' literals.
4580 4585 This allows sending complex types and encoding special byte sequences via
4581 4586 backslash escaping.
4582 4587
4583 4588 The following arguments have special meaning:
4584 4589
4585 4590 ``PUSHFILE``
4586 4591 When defined, the *push* mechanism of the peer will be used instead
4587 4592 of the static request-response mechanism and the content of the
4588 4593 file specified in the value of this argument will be sent as the
4589 4594 command payload.
4590 4595
4591 4596 This can be used to submit a local bundle file to the remote.
4592 4597
4593 4598 batchbegin
4594 4599 ----------
4595 4600
4596 4601 Instruct the peer to begin a batched send.
4597 4602
4598 4603 All ``command`` blocks are queued for execution until the next
4599 4604 ``batchsubmit`` block.
4600 4605
4601 4606 batchsubmit
4602 4607 -----------
4603 4608
4604 4609 Submit previously queued ``command`` blocks as a batch request.
4605 4610
4606 4611 This action MUST be paired with a ``batchbegin`` action.
4607 4612
4608 4613 httprequest <method> <path>
4609 4614 ---------------------------
4610 4615
4611 4616 (HTTP peer only)
4612 4617
4613 4618 Send an HTTP request to the peer.
4614 4619
4615 4620 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4616 4621
4617 4622 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4618 4623 headers to add to the request. e.g. ``Accept: foo``.
4619 4624
4620 4625 The following arguments are special:
4621 4626
4622 4627 ``BODYFILE``
4623 4628 The content of the file defined as the value to this argument will be
4624 4629 transferred verbatim as the HTTP request body.
4625 4630
4626 4631 ``frame <type> <flags> <payload>``
4627 4632 Send a unified protocol frame as part of the request body.
4628 4633
4629 4634 All frames will be collected and sent as the body to the HTTP
4630 4635 request.
4631 4636
4632 4637 close
4633 4638 -----
4634 4639
4635 4640 Close the connection to the server.
4636 4641
4637 4642 flush
4638 4643 -----
4639 4644
4640 4645 Flush data written to the server.
4641 4646
4642 4647 readavailable
4643 4648 -------------
4644 4649
4645 4650 Close the write end of the connection and read all available data from
4646 4651 the server.
4647 4652
4648 4653 If the connection to the server encompasses multiple pipes, we poll both
4649 4654 pipes and read available data.
4650 4655
4651 4656 readline
4652 4657 --------
4653 4658
4654 4659 Read a line of output from the server. If there are multiple output
4655 4660 pipes, reads only the main pipe.
4656 4661
4657 4662 ereadline
4658 4663 ---------
4659 4664
4660 4665 Like ``readline``, but read from the stderr pipe, if available.
4661 4666
4662 4667 read <X>
4663 4668 --------
4664 4669
4665 4670 ``read()`` N bytes from the server's main output pipe.
4666 4671
4667 4672 eread <X>
4668 4673 ---------
4669 4674
4670 4675 ``read()`` N bytes from the server's stderr pipe, if available.
4671 4676
4672 4677 Specifying Unified Frame-Based Protocol Frames
4673 4678 ----------------------------------------------
4674 4679
4675 4680 It is possible to emit a *Unified Frame-Based Protocol* by using special
4676 4681 syntax.
4677 4682
4678 4683 A frame is composed as a type, flags, and payload. These can be parsed
4679 4684 from a string of the form:
4680 4685
4681 4686 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4682 4687
4683 4688 ``request-id`` and ``stream-id`` are integers defining the request and
4684 4689 stream identifiers.
4685 4690
4686 4691 ``type`` can be an integer value for the frame type or the string name
4687 4692 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4688 4693 ``command-name``.
4689 4694
4690 4695 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4691 4696 components. Each component (and there can be just one) can be an integer
4692 4697 or a flag name for stream flags or frame flags, respectively. Values are
4693 4698 resolved to integers and then bitwise OR'd together.
4694 4699
4695 4700 ``payload`` represents the raw frame payload. If it begins with
4696 4701 ``cbor:``, the following string is evaluated as Python code and the
4697 4702 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4698 4703 as a Python byte string literal.
4699 4704 """
4700 4705 opts = pycompat.byteskwargs(opts)
4701 4706
4702 4707 if opts[b'localssh'] and not repo:
4703 4708 raise error.Abort(_(b'--localssh requires a repository'))
4704 4709
4705 4710 if opts[b'peer'] and opts[b'peer'] not in (
4706 4711 b'raw',
4707 4712 b'ssh1',
4708 4713 ):
4709 4714 raise error.Abort(
4710 4715 _(b'invalid value for --peer'),
4711 4716 hint=_(b'valid values are "raw" and "ssh1"'),
4712 4717 )
4713 4718
4714 4719 if path and opts[b'localssh']:
4715 4720 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4716 4721
4717 4722 if ui.interactive():
4718 4723 ui.write(_(b'(waiting for commands on stdin)\n'))
4719 4724
4720 4725 blocks = list(_parsewirelangblocks(ui.fin))
4721 4726
4722 4727 proc = None
4723 4728 stdin = None
4724 4729 stdout = None
4725 4730 stderr = None
4726 4731 opener = None
4727 4732
4728 4733 if opts[b'localssh']:
4729 4734 # We start the SSH server in its own process so there is process
4730 4735 # separation. This prevents a whole class of potential bugs around
4731 4736 # shared state from interfering with server operation.
4732 4737 args = procutil.hgcmd() + [
4733 4738 b'-R',
4734 4739 repo.root,
4735 4740 b'debugserve',
4736 4741 b'--sshstdio',
4737 4742 ]
4738 4743 proc = subprocess.Popen(
4739 4744 pycompat.rapply(procutil.tonativestr, args),
4740 4745 stdin=subprocess.PIPE,
4741 4746 stdout=subprocess.PIPE,
4742 4747 stderr=subprocess.PIPE,
4743 4748 bufsize=0,
4744 4749 )
4745 4750
4746 4751 stdin = proc.stdin
4747 4752 stdout = proc.stdout
4748 4753 stderr = proc.stderr
4749 4754
4750 4755 # We turn the pipes into observers so we can log I/O.
4751 4756 if ui.verbose or opts[b'peer'] == b'raw':
4752 4757 stdin = util.makeloggingfileobject(
4753 4758 ui, proc.stdin, b'i', logdata=True
4754 4759 )
4755 4760 stdout = util.makeloggingfileobject(
4756 4761 ui, proc.stdout, b'o', logdata=True
4757 4762 )
4758 4763 stderr = util.makeloggingfileobject(
4759 4764 ui, proc.stderr, b'e', logdata=True
4760 4765 )
4761 4766
4762 4767 # --localssh also implies the peer connection settings.
4763 4768
4764 4769 url = b'ssh://localserver'
4765 4770 autoreadstderr = not opts[b'noreadstderr']
4766 4771
4767 4772 if opts[b'peer'] == b'ssh1':
4768 4773 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4769 4774 peer = sshpeer.sshv1peer(
4770 4775 ui,
4771 4776 url,
4772 4777 proc,
4773 4778 stdin,
4774 4779 stdout,
4775 4780 stderr,
4776 4781 None,
4777 4782 autoreadstderr=autoreadstderr,
4778 4783 )
4779 4784 elif opts[b'peer'] == b'raw':
4780 4785 ui.write(_(b'using raw connection to peer\n'))
4781 4786 peer = None
4782 4787 else:
4783 4788 ui.write(_(b'creating ssh peer from handshake results\n'))
4784 4789 peer = sshpeer.makepeer(
4785 4790 ui,
4786 4791 url,
4787 4792 proc,
4788 4793 stdin,
4789 4794 stdout,
4790 4795 stderr,
4791 4796 autoreadstderr=autoreadstderr,
4792 4797 )
4793 4798
4794 4799 elif path:
4795 4800 # We bypass hg.peer() so we can proxy the sockets.
4796 4801 # TODO consider not doing this because we skip
4797 4802 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4798 4803 u = urlutil.url(path)
4799 4804 if u.scheme != b'http':
4800 4805 raise error.Abort(_(b'only http:// paths are currently supported'))
4801 4806
4802 4807 url, authinfo = u.authinfo()
4803 4808 openerargs = {
4804 4809 'useragent': b'Mercurial debugwireproto',
4805 4810 }
4806 4811
4807 4812 # Turn pipes/sockets into observers so we can log I/O.
4808 4813 if ui.verbose:
4809 4814 openerargs.update(
4810 4815 {
4811 4816 'loggingfh': ui,
4812 4817 'loggingname': b's',
4813 4818 'loggingopts': {
4814 4819 'logdata': True,
4815 4820 'logdataapis': False,
4816 4821 },
4817 4822 }
4818 4823 )
4819 4824
4820 4825 if ui.debugflag:
4821 4826 openerargs['loggingopts']['logdataapis'] = True
4822 4827
4823 4828 # Don't send default headers when in raw mode. This allows us to
4824 4829 # bypass most of the behavior of our URL handling code so we can
4825 4830 # have near complete control over what's sent on the wire.
4826 4831 if opts[b'peer'] == b'raw':
4827 4832 openerargs['sendaccept'] = False
4828 4833
4829 4834 opener = urlmod.opener(ui, authinfo, **openerargs)
4830 4835
4831 4836 if opts[b'peer'] == b'raw':
4832 4837 ui.write(_(b'using raw connection to peer\n'))
4833 4838 peer = None
4834 4839 elif opts[b'peer']:
4835 4840 raise error.Abort(
4836 4841 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4837 4842 )
4838 4843 else:
4839 4844 peer = httppeer.makepeer(ui, path, opener=opener)
4840 4845
4841 4846 # We /could/ populate stdin/stdout with sock.makefile()...
4842 4847 else:
4843 4848 raise error.Abort(_(b'unsupported connection configuration'))
4844 4849
4845 4850 batchedcommands = None
4846 4851
4847 4852 # Now perform actions based on the parsed wire language instructions.
4848 4853 for action, lines in blocks:
4849 4854 if action in (b'raw', b'raw+'):
4850 4855 if not stdin:
4851 4856 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4852 4857
4853 4858 # Concatenate the data together.
4854 4859 data = b''.join(l.lstrip() for l in lines)
4855 4860 data = stringutil.unescapestr(data)
4856 4861 stdin.write(data)
4857 4862
4858 4863 if action == b'raw+':
4859 4864 stdin.flush()
4860 4865 elif action == b'flush':
4861 4866 if not stdin:
4862 4867 raise error.Abort(_(b'cannot call flush on this peer'))
4863 4868 stdin.flush()
4864 4869 elif action.startswith(b'command'):
4865 4870 if not peer:
4866 4871 raise error.Abort(
4867 4872 _(
4868 4873 b'cannot send commands unless peer instance '
4869 4874 b'is available'
4870 4875 )
4871 4876 )
4872 4877
4873 4878 command = action.split(b' ', 1)[1]
4874 4879
4875 4880 args = {}
4876 4881 for line in lines:
4877 4882 # We need to allow empty values.
4878 4883 fields = line.lstrip().split(b' ', 1)
4879 4884 if len(fields) == 1:
4880 4885 key = fields[0]
4881 4886 value = b''
4882 4887 else:
4883 4888 key, value = fields
4884 4889
4885 4890 if value.startswith(b'eval:'):
4886 4891 value = stringutil.evalpythonliteral(value[5:])
4887 4892 else:
4888 4893 value = stringutil.unescapestr(value)
4889 4894
4890 4895 args[key] = value
4891 4896
4892 4897 if batchedcommands is not None:
4893 4898 batchedcommands.append((command, args))
4894 4899 continue
4895 4900
4896 4901 ui.status(_(b'sending %s command\n') % command)
4897 4902
4898 4903 if b'PUSHFILE' in args:
4899 4904 with open(args[b'PUSHFILE'], 'rb') as fh:
4900 4905 del args[b'PUSHFILE']
4901 4906 res, output = peer._callpush(
4902 4907 command, fh, **pycompat.strkwargs(args)
4903 4908 )
4904 4909 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4905 4910 ui.status(
4906 4911 _(b'remote output: %s\n') % stringutil.escapestr(output)
4907 4912 )
4908 4913 else:
4909 4914 with peer.commandexecutor() as e:
4910 4915 res = e.callcommand(command, args).result()
4911 4916
4912 4917 ui.status(
4913 4918 _(b'response: %s\n')
4914 4919 % stringutil.pprint(res, bprefix=True, indent=2)
4915 4920 )
4916 4921
4917 4922 elif action == b'batchbegin':
4918 4923 if batchedcommands is not None:
4919 4924 raise error.Abort(_(b'nested batchbegin not allowed'))
4920 4925
4921 4926 batchedcommands = []
4922 4927 elif action == b'batchsubmit':
4923 4928 # There is a batching API we could go through. But it would be
4924 4929 # difficult to normalize requests into function calls. It is easier
4925 4930 # to bypass this layer and normalize to commands + args.
4926 4931 ui.status(
4927 4932 _(b'sending batch with %d sub-commands\n')
4928 4933 % len(batchedcommands)
4929 4934 )
4930 4935 assert peer is not None
4931 4936 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4932 4937 ui.status(
4933 4938 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4934 4939 )
4935 4940
4936 4941 batchedcommands = None
4937 4942
4938 4943 elif action.startswith(b'httprequest '):
4939 4944 if not opener:
4940 4945 raise error.Abort(
4941 4946 _(b'cannot use httprequest without an HTTP peer')
4942 4947 )
4943 4948
4944 4949 request = action.split(b' ', 2)
4945 4950 if len(request) != 3:
4946 4951 raise error.Abort(
4947 4952 _(
4948 4953 b'invalid httprequest: expected format is '
4949 4954 b'"httprequest <method> <path>'
4950 4955 )
4951 4956 )
4952 4957
4953 4958 method, httppath = request[1:]
4954 4959 headers = {}
4955 4960 body = None
4956 4961 frames = []
4957 4962 for line in lines:
4958 4963 line = line.lstrip()
4959 4964 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4960 4965 if m:
4961 4966 # Headers need to use native strings.
4962 4967 key = pycompat.strurl(m.group(1))
4963 4968 value = pycompat.strurl(m.group(2))
4964 4969 headers[key] = value
4965 4970 continue
4966 4971
4967 4972 if line.startswith(b'BODYFILE '):
4968 4973 with open(line.split(b' ', 1), b'rb') as fh:
4969 4974 body = fh.read()
4970 4975 elif line.startswith(b'frame '):
4971 4976 frame = wireprotoframing.makeframefromhumanstring(
4972 4977 line[len(b'frame ') :]
4973 4978 )
4974 4979
4975 4980 frames.append(frame)
4976 4981 else:
4977 4982 raise error.Abort(
4978 4983 _(b'unknown argument to httprequest: %s') % line
4979 4984 )
4980 4985
4981 4986 url = path + httppath
4982 4987
4983 4988 if frames:
4984 4989 body = b''.join(bytes(f) for f in frames)
4985 4990
4986 4991 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4987 4992
4988 4993 # urllib.Request insists on using has_data() as a proxy for
4989 4994 # determining the request method. Override that to use our
4990 4995 # explicitly requested method.
4991 4996 req.get_method = lambda: pycompat.sysstr(method)
4992 4997
4993 4998 try:
4994 4999 res = opener.open(req)
4995 5000 body = res.read()
4996 5001 except util.urlerr.urlerror as e:
4997 5002 # read() method must be called, but only exists in Python 2
4998 5003 getattr(e, 'read', lambda: None)()
4999 5004 continue
5000 5005
5001 5006 ct = res.headers.get('Content-Type')
5002 5007 if ct == 'application/mercurial-cbor':
5003 5008 ui.write(
5004 5009 _(b'cbor> %s\n')
5005 5010 % stringutil.pprint(
5006 5011 cborutil.decodeall(body), bprefix=True, indent=2
5007 5012 )
5008 5013 )
5009 5014
5010 5015 elif action == b'close':
5011 5016 assert peer is not None
5012 5017 peer.close()
5013 5018 elif action == b'readavailable':
5014 5019 if not stdout or not stderr:
5015 5020 raise error.Abort(
5016 5021 _(b'readavailable not available on this peer')
5017 5022 )
5018 5023
5019 5024 stdin.close()
5020 5025 stdout.read()
5021 5026 stderr.read()
5022 5027
5023 5028 elif action == b'readline':
5024 5029 if not stdout:
5025 5030 raise error.Abort(_(b'readline not available on this peer'))
5026 5031 stdout.readline()
5027 5032 elif action == b'ereadline':
5028 5033 if not stderr:
5029 5034 raise error.Abort(_(b'ereadline not available on this peer'))
5030 5035 stderr.readline()
5031 5036 elif action.startswith(b'read '):
5032 5037 count = int(action.split(b' ', 1)[1])
5033 5038 if not stdout:
5034 5039 raise error.Abort(_(b'read not available on this peer'))
5035 5040 stdout.read(count)
5036 5041 elif action.startswith(b'eread '):
5037 5042 count = int(action.split(b' ', 1)[1])
5038 5043 if not stderr:
5039 5044 raise error.Abort(_(b'eread not available on this peer'))
5040 5045 stderr.read(count)
5041 5046 else:
5042 5047 raise error.Abort(_(b'unknown action: %s') % action)
5043 5048
5044 5049 if batchedcommands is not None:
5045 5050 raise error.Abort(_(b'unclosed "batchbegin" request'))
5046 5051
5047 5052 if peer:
5048 5053 peer.close()
5049 5054
5050 5055 if proc:
5051 5056 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now