##// END OF EJS Templates
path: pass `path` to `peer` in `hg debugssl`...
marmoute -
r50622:aae6b10d default
parent child Browse files
Show More
@@ -1,4718 +1,4716
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 getattr,
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 bundlerepo,
42 42 changegroup,
43 43 cmdutil,
44 44 color,
45 45 context,
46 46 copies,
47 47 dagparser,
48 48 dirstateutils,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 repoview,
73 73 requirements,
74 74 revlog,
75 75 revset,
76 76 revsetlang,
77 77 scmutil,
78 78 setdiscovery,
79 79 simplemerge,
80 80 sshpeer,
81 81 sslutil,
82 82 streamclone,
83 83 strip,
84 84 tags as tagsmod,
85 85 templater,
86 86 treediscovery,
87 87 upgrade,
88 88 url as urlmod,
89 89 util,
90 90 vfs as vfsmod,
91 91 wireprotoframing,
92 92 wireprotoserver,
93 93 )
94 94 from .interfaces import repository
95 95 from .utils import (
96 96 cborutil,
97 97 compression,
98 98 dateutil,
99 99 procutil,
100 100 stringutil,
101 101 urlutil,
102 102 )
103 103
104 104 from .revlogutils import (
105 105 constants as revlog_constants,
106 106 debug as revlog_debug,
107 107 deltas as deltautil,
108 108 nodemap,
109 109 rewrite,
110 110 sidedata,
111 111 )
112 112
# Shorthand used by commands below that manage locks manually.
release = lockmod.release

# Command table for the debug* commands.  Start from the table provided by
# the strip extension so its commands are registered here as well.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
119 119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it directly, no repo needed.
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = store.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(ancestor), hex(ancestor)))
139 139
140 140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Drop the (harmless) EICAR test file into the cache directory; a
    # resident AV scanner is expected to detect it.
    # NOTE(review): the path literal is a native str while vfs paths in this
    # file are otherwise byte strings — confirm cachevfs accepts str here.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    # Clean up; presumably a scanner that already quarantined the file makes
    # this fail, which would be the observable signal — TODO confirm.
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
156 156
157 157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
164 164
165 165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG text: determine number of revs so the
    # progress bar below can report a total.
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually create one commit per 'n' event, tracking the
    # default branch ('a' events) and local tags ('l' events) as we go.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge: three-way merge the shared file's contents
                        # from both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's own line so each rev's change is
                    # mergeable with the others.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # Carry the second parent's nf* files through the
                        # merge so they are not dropped.
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: provide content for paths we staged.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
349 349
350 350
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of the changegroup unbundler ``gen``

    With ``all`` set, every delta chunk of every section is listed with its
    metadata; otherwise only the changelog node ids are printed.  ``indent``
    prefixes each line, allowing nesting inside bundle2 part output.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Consume and display every delta of the current section.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # Sections must be read in stream order: changelog, manifest, then
        # one header per filelog until the empty-dict sentinel.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
390 390
391 391
def _debugobsmarkers(ui, part, indent=0, **opts):
    """decode an obsolescence-markers bundle part and print its contents

    Each marker is shown on its own line, prefixed by ``indent`` spaces.
    Unknown encoding versions are reported instead of raising.
    """
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
        return
    ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        marker = obsutil.marker(None, rawmarker)
        fm.startitem()
        fm.plain(prefix)
        cmdutil.showmarker(fm, marker)
    fm.end()
414 414
415 415
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from binary ``data``

    ``data`` is the binary encoding used by the 'phase-heads' bundle2
    part; one ``<node> <phasename>`` line is printed per head, prefixed
    by ``indent`` spaces.
    """
    # Fix: the previous docstring described obsolescence markers; this
    # helper decodes phase heads.
    indent_string = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
424 424
425 425
def _quasirepr(thing):
    """Return a stable, repr-like byte string for ``thing``.

    Mappings are rendered with keys sorted so the output is deterministic;
    anything else falls back to ``repr()``.
    """
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mapping_types):
        return pycompat.bytestr(repr(thing))
    pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(pairs)
432 432
433 433
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # note: the unbundler is constructed even under --quiet
            version = part.params.get(b'version', b'01')
            unbundler = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, unbundler, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
456 456
457 457
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # Only the bundlespec was requested; skip reading the payload.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
480 480
481 481
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b' %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # Always release the peer connection, even if listing failed.
        peer.close()
501 501
502 502
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    # Either recompute the file-change metadata from the context, or decode
    # it from the changelog's stored sidedata.
    files = None
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for fname in sorted(files.touched):
        if fname in files.added:
            action = b"added"
        elif fname in files.removed:
            action = b"removed"
        elif fname in files.merged:
            action = b"merged"
        elif fname in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if fname in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[fname]
        elif fname in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[fname]

        ui.write(template % (action, copy_parent, fname, copy_source))
552 552
553 553
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    # Each entry yielded by verify() is (format-string, *args).
    errcount = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
567 567
568 568
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
581 581
582 582
def _debugdisplaycolor(ui):
    """list every available color label, each rendered with its own style"""
    # Work on a copy so the caller's ui styles are left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, _value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    def sortkey(item):
        # Sort labels containing '_' after the others so that the
        # '_background' entries end up grouped together.
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
599 599
600 600
def _debugdisplaystyle(ui):
    """list configured style labels and the effects each one expands to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    longest = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # Pad so the effect lists line up in one column.
            ui.write(b': ')
            ui.write(b' ' * (max(0, longest - len(label))))
            rendered = [ui.label(effect, effect) for effect in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
614 614
615 615
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqlist = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqlist)
637 637
638 638
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Operate on a standalone revlog index file.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield a ('n', (rev, parents)) event per revision, plus a label
            # event for each revision explicitly listed on the command line.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each revision to the list of tag names pointing at it.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit a branch annotation whenever the branch changes
                    # from the previous revision.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
708 708
709 709
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    wholelog = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if wholelog:
        # -c/-m/--dir: the single positional argument is the revision.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
725 725
726 726
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
745 745
746 746
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                      (when p1 has empty delta)
                    - skip2: a delta against the same base as p2
                      (when p2 has empty delta)
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision delta statistics and classify the delta type.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta the base is either the revision itself
            # (a full snapshot) or the previous revision.
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Number chains by first-seen base so ids are stable and compact.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: no previous revision in the chain.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate the sparse read: sum the sizes of the disk blocks
            # that would actually be read to restore this revision.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
987 987
988 988
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to
    store a revision and display information about all the steps used to
    get to that result.

    By default, the process is fed with the full-text for the revision.
    This can be controlled with the --source flag.

    The revision use the revision number of the target storage (not
    changelog revision number).

    note: the process is initiated from a full text of the revision to
    store.
    """
    opts = pycompat.byteskwargs(opts)
    # With a single positional argument, it is the revision of the default
    # (-c/-m) revlog; with two, the first names the file.
    if arg_2 is None:
        file_, rev = None, arg_1
    else:
        file_, rev = arg_1, arg_2
    rev = int(rev)

    # local renamed from `revlog` so the module import is not shadowed
    rlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    p1r, p2r = rlog.parentrevs(rev)

    if source == b'full':
        base_rev = nullrev
    elif source == b'storage':
        base_rev = rlog.deltaparent(rev)
    elif source == b'p1':
        base_rev = p1r
    elif source == b'p2':
        base_rev = p2r
    elif source == b'prev':
        base_rev = rev - 1
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, rlog, rev, base_rev=base_rev)
1046 1046
1047 1047
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # The "docket" is the dirstate-v2 metadata file; v1 stores
        # everything in a single file and has no such thing.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        # Unpack the fixed-layout tree metadata blob carried by the docket.
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) overrides --dates when explicitly given.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort primarily by mtime, with filename as a tie-breaker.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # -1 is the sentinel for "mtime unknown/unset".
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 distinguishes the symlink file type in the stored st_mode
        # (S_IFLNK = 0o120000 has it set, S_IFREG = 0o100000 does not).
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    # Finally, list recorded copy sources.
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1135 1135
1136 1136
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 records the ignore-pattern hash (in the docket's
    # tree metadata); for v1 this command prints nothing.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash occupies the trailing bytes of the tree metadata blob.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1151 1151
1152 1152
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            # typo fix: was "treat local has having these revisions only"
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented configs are relevant for people playing
    with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # Normal case: talk to a real (or default) remote peer.
        path = urlutil.get_unique_pull_path_obj(
            b'debugdiscovery', ui, remoteurl
        )
        branches = (path.branch, [])
        remote = hg.peer(repo, opts, path)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
    else:
        # --remote-as-revs: impersonate the remote with a filtered view of
        # the local repository.
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: likewise restrict what "local" appears to have.
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # Legacy tree-walking discovery (pre-set-discovery protocol).

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # Modern sampling-based set discovery.

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # Structured output (e.g. JSON): capture stray ui output so it does
        # not corrupt the formatted document; expose it as a data field.

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1434 1434
1435 1435
# Buffer size (4 KiB) used by debugdownload for reading and writing chunks.
_chunksize = 4 << 10
1437 1437
1438 1438
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is written to stdout, or to the file given with --output.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    try:
        if output:
            dest = open(output, b"wb", _chunksize)
        # Stream the resource in fixed-size chunks.
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # BUG FIX: the response handle was previously never closed, leaking
        # the underlying connection/file descriptor.
        fh.close()
        # Only close `dest` when it is a real file; `ui` has no close().
        if dest is not ui:
            dest.close()
1461 1461
1462 1462
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # Iterate extensions in stable (name) order.
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # Locate the extension source; frozen builds (sys.oxidized) have no
        # __file__, so fall back to the executable path.
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # In quiet/verbose mode the name stands on its own line; otherwise
        # a compatibility note is appended on the same line.
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1524 1524
1525 1525
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # The fileset expression goes through these compilation stages in order;
    # --show-stage can dump the tree after any of them.
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, dumping the tree after each requested stage.
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Build the candidate file set the matcher will be applied to.
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # Print every candidate file that the fileset matches, sorted.
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1621 1621
1622 1622
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # Building a report and consuming one (or doing a dry run) are mutually
    # exclusive modes of operation.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # issue6528 only ever affected revlogv1 storage.
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # All the heavy lifting lives in the rewrite module.
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1695 1695
1696 1696
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: the longest variant name (at least as wide as the
    # b'format-variant' header).
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # Produce a left-aligned "%s:" padded to the common column width.
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # Bytes values (detected via startswith) pass through unchanged;
            # booleans are rendered as yes/no for the plain formatter.
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # Header row.
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick color labels depending on whether the repo disagrees with
        # the current config or with the Mercurial default.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns only appear with --verbose.
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1767 1767
1768 1768
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def _yesno(flag):
        # Render a filesystem probe result as b'yes' / b'no'.
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % _yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % _yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % _yesno(util.checknlink(path)))
    # Probe case sensitivity with a throwaway temp file; failures (e.g. an
    # unwritable path) leave the answer as "(unknown)".
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = _yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1791 1791
1792 1792
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # Translate the hex node arguments into binary nodes for the wire call.
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # Map the user-facing --type name onto the on-disk bundle header.
    wanted = opts.get(b'type', b'bzip2').lower()
    bundletype = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }.get(wanted)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1839 1839
1840 1840
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # A file is ignored either directly, or because one of its
                # parent directories matches an ignore rule.
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # Point at the exact rule (file, line number, pattern).
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1889 1889
1890 1890
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
    fm = ui.formatter(b'debugindex', opts)
    # storage objects may wrap the actual revlog in a `_revlog` attribute;
    # fall back to the store itself when it is already a revlog
    # (bytes attribute name works because this file uses pycompat.getattr)
    target = getattr(store, b'_revlog', store)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=target,
        full_node=ui.debugflag,
    )
1912 1912
1913 1913
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # one edge per real parent; a null second parent is not a real edge
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1932 1932
1933 1933
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # side-effect only call — presumably warms/populates the index before
    # asking it for stats (TODO confirm)
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # only the native (C/Rust) index implementations expose stats()
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1943 1943
1944 1944
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    # **opts arrives with str keys; convert back to bytes for internal use
    opts = pycompat.byteskwargs(opts)

    # running tally of detected issues; doubles as the return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # PyOxidizer builds have no os.__file__; report the binary instead
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # probe for the optional Rust extensions; rustext stays None when absent
    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    # NOTE(review): the %-formatting happens inside _(), so the translation
    # lookup sees the already-formatted string — presumably harmless since
    # it falls back to the untranslated text; verify
    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    # NOTE: this rebinds the `rustext` module probe above to a bool
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let loaded extensions report their own installation problems
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2243 2243
2244 2244
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    nodes = [bin(s) for s in ids]
    digits = [b"1" if known else b"0" for known in peer.known(nodes)]
    ui.write(b"%s\n" % b"".join(digits))
2258 2258
2259 2259
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias kept so old completion scripts keep working; the real
    # logic lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2264 2264
2265 2265
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: delete the lock file(s) outright and stop there
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        # wlock is acquired before lock when both are requested
        if opts.get('set_wlock'):
            try:
                # False: fail immediately instead of waiting for the lock
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # locks are always released on the way out, whatever happened above
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            # non-blocking acquire: succeeds only when the lock is free
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                # lock contents are "host:pid"; show the host only when the
                # process is not local
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # lock file vanished between the probe and the stat: free
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2388 2388
2389 2389
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def _cache():
        # only revlog-backed manifest storage carries a fulltext cache
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            _cache().clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            manifestlog = repo.manifestlog
            store = manifestlog.getstorage(b'')
            for node in add:
                try:
                    manifest = manifestlog[store.lookup(node)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                # reading the manifest stores the revision in the cache too
                manifest.read()
        return

    cache = _cache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
        return
    ui.write(
        _(
            b'cache contains %d manifest entries, in order of most to '
            b'least recent:\n'
        )
        % (len(cache),)
    )
    totalsize = 0
    for nodeid in cache:
        # peek() so inspection does not reorder the LRU
        data = cache.peek(nodeid)
        entrysize = len(data)
        totalsize += entrysize + 24  # 20 bytes nodeid, 4 bytes size
        ui.write(
            _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(entrysize))
        )
    ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
    ui.write(
        _(b'total cache data size %s, on-disk %s\n')
        % (util.bytecount(totalsize), util.bytecount(ondisk))
    )
2463 2463
2464 2464
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable rendering of the merge state
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged, with their optional merge labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            # positional record: element 0 is the state, elements 1-7 are
            # consumed below depending on the record type
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2572 2572
2573 2573
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # branches get special treatment below: historically only *open*
    # branches were listed, so skip the generic branches namespace here
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    prefixes = args or [b'']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in candidates if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2596 2596
2597 2597
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        cl = repo.unfiltered().changelog
        # prefer the index's own serializer when it provides one
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        cl = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(cl)
        # silently do nothing when no nodemap was persisted
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        cl = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        cl = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2659 2659
2660 2660
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id, without requiring it to exist locally.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete: remove markers by index and stop
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: PRECURSOR [SUCCESSOR...] creates one marker
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        # renamed from `metadata` to avoid shadowing the module-level
        # `metadata` import
        marker_metadata = {}
        marker_metadata[b'user'] = encoding.fromlocal(
            opts[b'user'] or ui.username()
        )
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        # fixed grammar in user-facing message
                        # (was "cannot used")
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=marker_metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                # tr.release() rolls back unless tr.close() already ran
                tr.release()
        finally:
            l.release()
    else:
        # display mode
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2810 2810
2811 2811
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copies = ctx.p1copies()
    for dest in copies:
        ui.write(b'%s -> %s\n' % (copies[dest], dest))
2824 2824
2825 2825
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copies = ctx.p2copies()
    for dest in copies:
        ui.write(b'%s -> %s\n' % (copies[dest], dest))
2838 2838
2839 2839
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for ``path`` restricted to
        # dirstate entries whose state letter is in ``acceptable``.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Nothing to complete for paths outside the repository root.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative, dirstate-style (forward slashes).
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, truncate to the next path segment: a
                # separator past the spec means a directory completion.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Combine the state filters; empty means "no filter given" and falls
    # back to all states (b'nmar') below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2908 2908
2909 2909
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    old_ctx = scmutil.revsingle(repo, rev1)
    new_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(old_ctx, pats, opts)
    copy_map = copies.pathcopies(old_ctx, new_ctx, matcher)
    # Emit one "<source> -> <destination>" line per copy, sorted by
    # destination for stable output.
    for destination, origin in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (origin, destination))
2923 2923
2924 2924
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        is_pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if is_local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if is_pushable else _(b'no'))
        )
    finally:
        # Always release the connection, even if a query above failed.
        peer.close()
2948 2948
2949 2949
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool is injected as a ui.forcemerge override so _picktool sees it
    # the same way a real merge would.
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress _picktool's own output unless --debug was given;
            # only the "FILE = MERGETOOL" summary line is printed below.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3034 3034
3035 3035
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Write path: KEY OLD NEW were given, issue a pushkey command.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # Exit status 0 when the server reported success (truthy r).
            return not r
        else:
            # Read path: list every key/value pair in the namespace.
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
3071 3071
3072 3072
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display parent-vector data and the relation between two revisions

    Prints both pvecs, their depths, and the delta/hamming
    distance/relation between them.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # The comparisons above are expected to be exhaustive, but fall
        # back to an explicit marker rather than leaving ``rel`` unbound
        # (which would raise NameError in the write below).
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3099 3099
3100 3100
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows it below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files in the manifest but missing from the dirstate...
            manifestonly = manifestfiles - dirstatefiles
            # ...plus dirstate-only files that are not pending adds.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3148 3148
3149 3149
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    opts = pycompat.byteskwargs(opts)
    # Delegate the actual work to the repair module.
    only_data = opts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3166 3166
3167 3167
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a falsy value.
        origin = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not origin:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, origin[0], hex(origin[1]))
            )
3187 3187
3188 3188
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3194 3194
3195 3195
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # -d/--dump prints raw index data; the default path prints statistics.
    if opts.get(b"dump"):
        revlog_debug.dump(ui, r)
    else:
        revlog_debug.debug_revlog(ui, r)
    return 0
3212 3212
3213 3213
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # Only the two historical index formats are supported.
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full 40-char hashes; otherwise short ones.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Measure one node to size the header columns; all hashes from
        # shortfn have the same width.
        idlen = len(shortfn(r.node(i)))
        break

    # Header: layout depends on format and on --verbose.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # Body: one line per revision, mirroring the header layout above.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the lookup fails.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3327 3327
3328 3328
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset processing pipeline: each stage transforms the tree
    # produced by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Which stage trees to print: always, or only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree for --verify-optimized.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and diff the
        # resulting revision lists; any difference is an optimizer bug.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3460 3460
3461 3461
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive sinks for the
    # server I/O log.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Blocks until the client disconnects.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3510 3510
3511 3511
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # rev2 defaults to the null revision when omitted.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3539 3539
3540 3540
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir the single positional argument is the revision,
        # not a file path.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Unwrap to the underlying revlog when the storage object wraps one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3567 3567
3568 3568
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        # Without an explicit SOURCE we need a repo to resolve 'default'.
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12,
    # so build the equivalent non-verifying client context explicitly.
    # Verification is intentionally disabled: the point of this command is
    # to fetch the peer certificate even when the chain is incomplete.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # True -> DER-encoded certificate bytes, as win32 expects.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call with build=True asks Windows Update to fetch
            # the missing intermediates/root.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3640 3638
3641 3639
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the strip backups, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to ``limit`` changesets from chlist, honouring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # getremotechanges is chatty; only its result matters here.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Recovery mode: unbundle the first backup containing the
                # requested node, then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print the backup's mtime header, then either
                # the bundle path (--verbose) or the changesets it contains.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always remove the temporary bundle repo state.
            cleanupfn()
3782 3780
3783 3781
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepo state (source URL and pinned revision) recorded in
    # the .hgsubstate of the requested changeset.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3795 3793
3796 3794
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Import lazily: the interpreter machinery is only needed when this
    # debug command actually runs.
    import code

    # Seed the interactive namespace with the objects a developer most
    # often wants to poke at.
    namespace = {'ui': ui, 'repo': repo}
    code.interact(local=namespace)
3812 3810
3813 3811
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # The cache dict is shared across successorssets() calls so repeated
    # markers are only walked once.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        groups = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for group in groups:
            # An empty set (pruned changeset) still produces a blank line.
            if group:
                rendered = [short(node) for node in group]
                ui.write(b' ' + b' '.join(rendered))
            ui.write(b'\n')
3868 3866
3869 3867
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        if fnode is None:
            # No cache entry for this revision at all.
            display = b'missing'
        elif fnode:
            display = hex(fnode)
            # Flag entries pointing at a .hgtags revision we don't have.
            if not flog.hasnode(fnode):
                display += b' (unknown node)'
        else:
            # Falsy but not None: a corrupt/empty cache record.
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3888 3886
3889 3887
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Log-template mode needs an actual repository.
        if repo is None:
            msg = _(b'there is no Mercurial repository here (.hg not found)')
            raise error.RepoError(msg)
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Turn every -D KEY=VALUE into a template property; 'ui' is reserved.
    props = {}
    for definition in opts['define']:
        try:
            key, value = (part.strip() for part in definition.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
        except ValueError:
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )
        props[key] = value

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        expanded = templater.expandaliases(tree, aliases)
        if expanded != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(expanded), b'\n'
            )

    def _showsymbols(symbols):
        # Print the keywords and functions the template actually uses.
        kwds, funcs = symbols
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    if revs is None:
        # Generic-template mode: render once with the given properties.
        resources = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=resources)
        if ui.verbose:
            _showsymbols(t.symbolsuseddefault())
        ui.write(t.renderdefault(props))
    else:
        # Log-template mode: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            _showsymbols(displayer.t.symbolsuseddefault())
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
3953 3951
3954 3952
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() can return None (e.g. on EOF); show a placeholder so the
    # output line is always present.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
3969 3967
3970 3968
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user typed back on stdout.
    ui.writenoi18n(b'response: %s\n' % ui.prompt(prompt))
3983 3981
3984 3982
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy lock and the store lock so cache files can
    # be rewritten safely.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
3990 3988
3991 3989
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate requested optimizations before handing off the real work.
    requested = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=requested, backup=backup, **opts
    )
4041 4039
4042 4040
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Display paths with '/' separators when ui.slash is set on a platform
    # whose native separator differs.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        fmtpath = util.normpath
    else:

        def fmtpath(fn):
            return fn

    # Column widths are sized to the longest repo-relative and cwd-relative
    # paths so the output lines up.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            fmtpath(repo.pathto(path)),
            b'exact' if m.exact(path) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4069 4067
4070 4068
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render the divergent changesets with their phases, followed by
            # a trailing space to separate them from the reason.
            rendered = b' '.join(
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            )
            dnodes = rendered + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4088 4086
4089 4087
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only command-specific ones are
        # forwarded over the wire.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        callargs = pycompat.strkwargs(
            {k: v for k, v in opts.items() if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        res1 = peer.debugwireargs(*vals, **callargs)
        res2 = peer.debugwireargs(*vals, **callargs)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        peer.close()
4120 4118
4121 4119
4122 4120 def _parsewirelangblocks(fh):
4123 4121 activeaction = None
4124 4122 blocklines = []
4125 4123 lastindent = 0
4126 4124
4127 4125 for line in fh:
4128 4126 line = line.rstrip()
4129 4127 if not line:
4130 4128 continue
4131 4129
4132 4130 if line.startswith(b'#'):
4133 4131 continue
4134 4132
4135 4133 if not line.startswith(b' '):
4136 4134 # New block. Flush previous one.
4137 4135 if activeaction:
4138 4136 yield activeaction, blocklines
4139 4137
4140 4138 activeaction = line
4141 4139 blocklines = []
4142 4140 lastindent = 0
4143 4141 continue
4144 4142
4145 4143 # Else we start with an indent.
4146 4144
4147 4145 if not activeaction:
4148 4146 raise error.Abort(_(b'indented line outside of block'))
4149 4147
4150 4148 indent = len(line) - len(line.lstrip())
4151 4149
4152 4150 # If this line is indented more than the last line, concatenate it.
4153 4151 if indent > lastindent and blocklines:
4154 4152 blocklines[-1] += line.lstrip()
4155 4153 else:
4156 4154 blocklines.append(line)
4157 4155 lastindent = indent
4158 4156
4159 4157 # Flush last block.
4160 4158 if activeaction:
4161 4159 yield activeaction, blocklines
4162 4160
4163 4161
4164 4162 @command(
4165 4163 b'debugwireproto',
4166 4164 [
4167 4165 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4168 4166 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4169 4167 (
4170 4168 b'',
4171 4169 b'noreadstderr',
4172 4170 False,
4173 4171 _(b'do not read from stderr of the remote'),
4174 4172 ),
4175 4173 (
4176 4174 b'',
4177 4175 b'nologhandshake',
4178 4176 False,
4179 4177 _(b'do not log I/O related to the peer handshake'),
4180 4178 ),
4181 4179 ]
4182 4180 + cmdutil.remoteopts,
4183 4181 _(b'[PATH]'),
4184 4182 optionalrepo=True,
4185 4183 )
4186 4184 def debugwireproto(ui, repo, path=None, **opts):
4187 4185 """send wire protocol commands to a server
4188 4186
4189 4187 This command can be used to issue wire protocol commands to remote
4190 4188 peers and to debug the raw data being exchanged.
4191 4189
4192 4190 ``--localssh`` will start an SSH server against the current repository
4193 4191 and connect to that. By default, the connection will perform a handshake
4194 4192 and establish an appropriate peer instance.
4195 4193
4196 4194 ``--peer`` can be used to bypass the handshake protocol and construct a
4197 4195 peer instance using the specified class type. Valid values are ``raw``,
4198 4196 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4199 4197 don't support higher-level command actions.
4200 4198
4201 4199 ``--noreadstderr`` can be used to disable automatic reading from stderr
4202 4200 of the peer (for SSH connections only). Disabling automatic reading of
4203 4201 stderr is useful for making output more deterministic.
4204 4202
4205 4203 Commands are issued via a mini language which is specified via stdin.
4206 4204 The language consists of individual actions to perform. An action is
4207 4205 defined by a block. A block is defined as a line with no leading
4208 4206 space followed by 0 or more lines with leading space. Blocks are
4209 4207 effectively a high-level command with additional metadata.
4210 4208
4211 4209 Lines beginning with ``#`` are ignored.
4212 4210
4213 4211 The following sections denote available actions.
4214 4212
4215 4213 raw
4216 4214 ---
4217 4215
4218 4216 Send raw data to the server.
4219 4217
4220 4218 The block payload contains the raw data to send as one atomic send
4221 4219 operation. The data may not actually be delivered in a single system
4222 4220 call: it depends on the abilities of the transport being used.
4223 4221
4224 4222 Each line in the block is de-indented and concatenated. Then, that
4225 4223 value is evaluated as a Python b'' literal. This allows the use of
4226 4224 backslash escaping, etc.
4227 4225
4228 4226 raw+
4229 4227 ----
4230 4228
4231 4229 Behaves like ``raw`` except flushes output afterwards.
4232 4230
4233 4231 command <X>
4234 4232 -----------
4235 4233
4236 4234 Send a request to run a named command, whose name follows the ``command``
4237 4235 string.
4238 4236
4239 4237 Arguments to the command are defined as lines in this block. The format of
4240 4238 each line is ``<key> <value>``. e.g.::
4241 4239
4242 4240 command listkeys
4243 4241 namespace bookmarks
4244 4242
4245 4243 If the value begins with ``eval:``, it will be interpreted as a Python
4246 4244 literal expression. Otherwise values are interpreted as Python b'' literals.
4247 4245 This allows sending complex types and encoding special byte sequences via
4248 4246 backslash escaping.
4249 4247
4250 4248 The following arguments have special meaning:
4251 4249
4252 4250 ``PUSHFILE``
4253 4251 When defined, the *push* mechanism of the peer will be used instead
4254 4252 of the static request-response mechanism and the content of the
4255 4253 file specified in the value of this argument will be sent as the
4256 4254 command payload.
4257 4255
4258 4256 This can be used to submit a local bundle file to the remote.
4259 4257
4260 4258 batchbegin
4261 4259 ----------
4262 4260
4263 4261 Instruct the peer to begin a batched send.
4264 4262
4265 4263 All ``command`` blocks are queued for execution until the next
4266 4264 ``batchsubmit`` block.
4267 4265
4268 4266 batchsubmit
4269 4267 -----------
4270 4268
4271 4269 Submit previously queued ``command`` blocks as a batch request.
4272 4270
4273 4271 This action MUST be paired with a ``batchbegin`` action.
4274 4272
4275 4273 httprequest <method> <path>
4276 4274 ---------------------------
4277 4275
4278 4276 (HTTP peer only)
4279 4277
4280 4278 Send an HTTP request to the peer.
4281 4279
4282 4280 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4283 4281
4284 4282 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4285 4283 headers to add to the request. e.g. ``Accept: foo``.
4286 4284
4287 4285 The following arguments are special:
4288 4286
4289 4287 ``BODYFILE``
4290 4288 The content of the file defined as the value to this argument will be
4291 4289 transferred verbatim as the HTTP request body.
4292 4290
4293 4291 ``frame <type> <flags> <payload>``
4294 4292 Send a unified protocol frame as part of the request body.
4295 4293
4296 4294 All frames will be collected and sent as the body to the HTTP
4297 4295 request.
4298 4296
4299 4297 close
4300 4298 -----
4301 4299
4302 4300 Close the connection to the server.
4303 4301
4304 4302 flush
4305 4303 -----
4306 4304
4307 4305 Flush data written to the server.
4308 4306
4309 4307 readavailable
4310 4308 -------------
4311 4309
4312 4310 Close the write end of the connection and read all available data from
4313 4311 the server.
4314 4312
4315 4313 If the connection to the server encompasses multiple pipes, we poll both
4316 4314 pipes and read available data.
4317 4315
4318 4316 readline
4319 4317 --------
4320 4318
4321 4319 Read a line of output from the server. If there are multiple output
4322 4320 pipes, reads only the main pipe.
4323 4321
4324 4322 ereadline
4325 4323 ---------
4326 4324
4327 4325 Like ``readline``, but read from the stderr pipe, if available.
4328 4326
4329 4327 read <X>
4330 4328 --------
4331 4329
4332 4330 ``read()`` N bytes from the server's main output pipe.
4333 4331
4334 4332 eread <X>
4335 4333 ---------
4336 4334
4337 4335 ``read()`` N bytes from the server's stderr pipe, if available.
4338 4336
4339 4337 Specifying Unified Frame-Based Protocol Frames
4340 4338 ----------------------------------------------
4341 4339
4342 4340 It is possible to emit a *Unified Frame-Based Protocol* by using special
4343 4341 syntax.
4344 4342
4345 4343 A frame is composed as a type, flags, and payload. These can be parsed
4346 4344 from a string of the form:
4347 4345
4348 4346 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4349 4347
4350 4348 ``request-id`` and ``stream-id`` are integers defining the request and
4351 4349 stream identifiers.
4352 4350
4353 4351 ``type`` can be an integer value for the frame type or the string name
4354 4352 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4355 4353 ``command-name``.
4356 4354
4357 4355 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4358 4356 components. Each component (and there can be just one) can be an integer
4359 4357 or a flag name for stream flags or frame flags, respectively. Values are
4360 4358 resolved to integers and then bitwise OR'd together.
4361 4359
4362 4360 ``payload`` represents the raw frame payload. If it begins with
4363 4361 ``cbor:``, the following string is evaluated as Python code and the
4364 4362 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4365 4363 as a Python byte string literal.
4366 4364 """
4367 4365 opts = pycompat.byteskwargs(opts)
4368 4366
4369 4367 if opts[b'localssh'] and not repo:
4370 4368 raise error.Abort(_(b'--localssh requires a repository'))
4371 4369
4372 4370 if opts[b'peer'] and opts[b'peer'] not in (
4373 4371 b'raw',
4374 4372 b'ssh1',
4375 4373 ):
4376 4374 raise error.Abort(
4377 4375 _(b'invalid value for --peer'),
4378 4376 hint=_(b'valid values are "raw" and "ssh1"'),
4379 4377 )
4380 4378
4381 4379 if path and opts[b'localssh']:
4382 4380 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4383 4381
4384 4382 if ui.interactive():
4385 4383 ui.write(_(b'(waiting for commands on stdin)\n'))
4386 4384
4387 4385 blocks = list(_parsewirelangblocks(ui.fin))
4388 4386
4389 4387 proc = None
4390 4388 stdin = None
4391 4389 stdout = None
4392 4390 stderr = None
4393 4391 opener = None
4394 4392
4395 4393 if opts[b'localssh']:
4396 4394 # We start the SSH server in its own process so there is process
4397 4395 # separation. This prevents a whole class of potential bugs around
4398 4396 # shared state from interfering with server operation.
4399 4397 args = procutil.hgcmd() + [
4400 4398 b'-R',
4401 4399 repo.root,
4402 4400 b'debugserve',
4403 4401 b'--sshstdio',
4404 4402 ]
4405 4403 proc = subprocess.Popen(
4406 4404 pycompat.rapply(procutil.tonativestr, args),
4407 4405 stdin=subprocess.PIPE,
4408 4406 stdout=subprocess.PIPE,
4409 4407 stderr=subprocess.PIPE,
4410 4408 bufsize=0,
4411 4409 )
4412 4410
4413 4411 stdin = proc.stdin
4414 4412 stdout = proc.stdout
4415 4413 stderr = proc.stderr
4416 4414
4417 4415 # We turn the pipes into observers so we can log I/O.
4418 4416 if ui.verbose or opts[b'peer'] == b'raw':
4419 4417 stdin = util.makeloggingfileobject(
4420 4418 ui, proc.stdin, b'i', logdata=True
4421 4419 )
4422 4420 stdout = util.makeloggingfileobject(
4423 4421 ui, proc.stdout, b'o', logdata=True
4424 4422 )
4425 4423 stderr = util.makeloggingfileobject(
4426 4424 ui, proc.stderr, b'e', logdata=True
4427 4425 )
4428 4426
4429 4427 # --localssh also implies the peer connection settings.
4430 4428
4431 4429 url = b'ssh://localserver'
4432 4430 autoreadstderr = not opts[b'noreadstderr']
4433 4431
4434 4432 if opts[b'peer'] == b'ssh1':
4435 4433 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4436 4434 peer = sshpeer.sshv1peer(
4437 4435 ui,
4438 4436 url,
4439 4437 proc,
4440 4438 stdin,
4441 4439 stdout,
4442 4440 stderr,
4443 4441 None,
4444 4442 autoreadstderr=autoreadstderr,
4445 4443 )
4446 4444 elif opts[b'peer'] == b'raw':
4447 4445 ui.write(_(b'using raw connection to peer\n'))
4448 4446 peer = None
4449 4447 else:
4450 4448 ui.write(_(b'creating ssh peer from handshake results\n'))
4451 4449 peer = sshpeer.makepeer(
4452 4450 ui,
4453 4451 url,
4454 4452 proc,
4455 4453 stdin,
4456 4454 stdout,
4457 4455 stderr,
4458 4456 autoreadstderr=autoreadstderr,
4459 4457 )
4460 4458
4461 4459 elif path:
4462 4460 # We bypass hg.peer() so we can proxy the sockets.
4463 4461 # TODO consider not doing this because we skip
4464 4462 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4465 4463 u = urlutil.url(path)
4466 4464 if u.scheme != b'http':
4467 4465 raise error.Abort(_(b'only http:// paths are currently supported'))
4468 4466
4469 4467 url, authinfo = u.authinfo()
4470 4468 openerargs = {
4471 4469 'useragent': b'Mercurial debugwireproto',
4472 4470 }
4473 4471
4474 4472 # Turn pipes/sockets into observers so we can log I/O.
4475 4473 if ui.verbose:
4476 4474 openerargs.update(
4477 4475 {
4478 4476 'loggingfh': ui,
4479 4477 'loggingname': b's',
4480 4478 'loggingopts': {
4481 4479 'logdata': True,
4482 4480 'logdataapis': False,
4483 4481 },
4484 4482 }
4485 4483 )
4486 4484
4487 4485 if ui.debugflag:
4488 4486 openerargs['loggingopts']['logdataapis'] = True
4489 4487
4490 4488 # Don't send default headers when in raw mode. This allows us to
4491 4489 # bypass most of the behavior of our URL handling code so we can
4492 4490 # have near complete control over what's sent on the wire.
4493 4491 if opts[b'peer'] == b'raw':
4494 4492 openerargs['sendaccept'] = False
4495 4493
4496 4494 opener = urlmod.opener(ui, authinfo, **openerargs)
4497 4495
4498 4496 if opts[b'peer'] == b'raw':
4499 4497 ui.write(_(b'using raw connection to peer\n'))
4500 4498 peer = None
4501 4499 elif opts[b'peer']:
4502 4500 raise error.Abort(
4503 4501 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4504 4502 )
4505 4503 else:
4506 4504 peer = httppeer.makepeer(ui, path, opener=opener)
4507 4505
4508 4506 # We /could/ populate stdin/stdout with sock.makefile()...
4509 4507 else:
4510 4508 raise error.Abort(_(b'unsupported connection configuration'))
4511 4509
4512 4510 batchedcommands = None
4513 4511
4514 4512 # Now perform actions based on the parsed wire language instructions.
4515 4513 for action, lines in blocks:
4516 4514 if action in (b'raw', b'raw+'):
4517 4515 if not stdin:
4518 4516 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4519 4517
4520 4518 # Concatenate the data together.
4521 4519 data = b''.join(l.lstrip() for l in lines)
4522 4520 data = stringutil.unescapestr(data)
4523 4521 stdin.write(data)
4524 4522
4525 4523 if action == b'raw+':
4526 4524 stdin.flush()
4527 4525 elif action == b'flush':
4528 4526 if not stdin:
4529 4527 raise error.Abort(_(b'cannot call flush on this peer'))
4530 4528 stdin.flush()
4531 4529 elif action.startswith(b'command'):
4532 4530 if not peer:
4533 4531 raise error.Abort(
4534 4532 _(
4535 4533 b'cannot send commands unless peer instance '
4536 4534 b'is available'
4537 4535 )
4538 4536 )
4539 4537
4540 4538 command = action.split(b' ', 1)[1]
4541 4539
4542 4540 args = {}
4543 4541 for line in lines:
4544 4542 # We need to allow empty values.
4545 4543 fields = line.lstrip().split(b' ', 1)
4546 4544 if len(fields) == 1:
4547 4545 key = fields[0]
4548 4546 value = b''
4549 4547 else:
4550 4548 key, value = fields
4551 4549
4552 4550 if value.startswith(b'eval:'):
4553 4551 value = stringutil.evalpythonliteral(value[5:])
4554 4552 else:
4555 4553 value = stringutil.unescapestr(value)
4556 4554
4557 4555 args[key] = value
4558 4556
4559 4557 if batchedcommands is not None:
4560 4558 batchedcommands.append((command, args))
4561 4559 continue
4562 4560
4563 4561 ui.status(_(b'sending %s command\n') % command)
4564 4562
4565 4563 if b'PUSHFILE' in args:
4566 4564 with open(args[b'PUSHFILE'], 'rb') as fh:
4567 4565 del args[b'PUSHFILE']
4568 4566 res, output = peer._callpush(
4569 4567 command, fh, **pycompat.strkwargs(args)
4570 4568 )
4571 4569 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4572 4570 ui.status(
4573 4571 _(b'remote output: %s\n') % stringutil.escapestr(output)
4574 4572 )
4575 4573 else:
4576 4574 with peer.commandexecutor() as e:
4577 4575 res = e.callcommand(command, args).result()
4578 4576
4579 4577 ui.status(
4580 4578 _(b'response: %s\n')
4581 4579 % stringutil.pprint(res, bprefix=True, indent=2)
4582 4580 )
4583 4581
4584 4582 elif action == b'batchbegin':
4585 4583 if batchedcommands is not None:
4586 4584 raise error.Abort(_(b'nested batchbegin not allowed'))
4587 4585
4588 4586 batchedcommands = []
4589 4587 elif action == b'batchsubmit':
4590 4588 # There is a batching API we could go through. But it would be
4591 4589 # difficult to normalize requests into function calls. It is easier
4592 4590 # to bypass this layer and normalize to commands + args.
4593 4591 ui.status(
4594 4592 _(b'sending batch with %d sub-commands\n')
4595 4593 % len(batchedcommands)
4596 4594 )
4597 4595 assert peer is not None
4598 4596 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4599 4597 ui.status(
4600 4598 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4601 4599 )
4602 4600
4603 4601 batchedcommands = None
4604 4602
4605 4603 elif action.startswith(b'httprequest '):
4606 4604 if not opener:
4607 4605 raise error.Abort(
4608 4606 _(b'cannot use httprequest without an HTTP peer')
4609 4607 )
4610 4608
4611 4609 request = action.split(b' ', 2)
4612 4610 if len(request) != 3:
4613 4611 raise error.Abort(
4614 4612 _(
4615 4613 b'invalid httprequest: expected format is '
4616 4614 b'"httprequest <method> <path>'
4617 4615 )
4618 4616 )
4619 4617
4620 4618 method, httppath = request[1:]
4621 4619 headers = {}
4622 4620 body = None
4623 4621 frames = []
4624 4622 for line in lines:
4625 4623 line = line.lstrip()
4626 4624 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4627 4625 if m:
4628 4626 # Headers need to use native strings.
4629 4627 key = pycompat.strurl(m.group(1))
4630 4628 value = pycompat.strurl(m.group(2))
4631 4629 headers[key] = value
4632 4630 continue
4633 4631
4634 4632 if line.startswith(b'BODYFILE '):
4635 4633 with open(line.split(b' ', 1), b'rb') as fh:
4636 4634 body = fh.read()
4637 4635 elif line.startswith(b'frame '):
4638 4636 frame = wireprotoframing.makeframefromhumanstring(
4639 4637 line[len(b'frame ') :]
4640 4638 )
4641 4639
4642 4640 frames.append(frame)
4643 4641 else:
4644 4642 raise error.Abort(
4645 4643 _(b'unknown argument to httprequest: %s') % line
4646 4644 )
4647 4645
4648 4646 url = path + httppath
4649 4647
4650 4648 if frames:
4651 4649 body = b''.join(bytes(f) for f in frames)
4652 4650
4653 4651 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4654 4652
4655 4653 # urllib.Request insists on using has_data() as a proxy for
4656 4654 # determining the request method. Override that to use our
4657 4655 # explicitly requested method.
4658 4656 req.get_method = lambda: pycompat.sysstr(method)
4659 4657
4660 4658 try:
4661 4659 res = opener.open(req)
4662 4660 body = res.read()
4663 4661 except util.urlerr.urlerror as e:
4664 4662 # read() method must be called, but only exists in Python 2
4665 4663 getattr(e, 'read', lambda: None)()
4666 4664 continue
4667 4665
4668 4666 ct = res.headers.get('Content-Type')
4669 4667 if ct == 'application/mercurial-cbor':
4670 4668 ui.write(
4671 4669 _(b'cbor> %s\n')
4672 4670 % stringutil.pprint(
4673 4671 cborutil.decodeall(body), bprefix=True, indent=2
4674 4672 )
4675 4673 )
4676 4674
4677 4675 elif action == b'close':
4678 4676 assert peer is not None
4679 4677 peer.close()
4680 4678 elif action == b'readavailable':
4681 4679 if not stdout or not stderr:
4682 4680 raise error.Abort(
4683 4681 _(b'readavailable not available on this peer')
4684 4682 )
4685 4683
4686 4684 stdin.close()
4687 4685 stdout.read()
4688 4686 stderr.read()
4689 4687
4690 4688 elif action == b'readline':
4691 4689 if not stdout:
4692 4690 raise error.Abort(_(b'readline not available on this peer'))
4693 4691 stdout.readline()
4694 4692 elif action == b'ereadline':
4695 4693 if not stderr:
4696 4694 raise error.Abort(_(b'ereadline not available on this peer'))
4697 4695 stderr.readline()
4698 4696 elif action.startswith(b'read '):
4699 4697 count = int(action.split(b' ', 1)[1])
4700 4698 if not stdout:
4701 4699 raise error.Abort(_(b'read not available on this peer'))
4702 4700 stdout.read(count)
4703 4701 elif action.startswith(b'eread '):
4704 4702 count = int(action.split(b' ', 1)[1])
4705 4703 if not stderr:
4706 4704 raise error.Abort(_(b'eread not available on this peer'))
4707 4705 stderr.read(count)
4708 4706 else:
4709 4707 raise error.Abort(_(b'unknown action: %s') % action)
4710 4708
4711 4709 if batchedcommands is not None:
4712 4710 raise error.Abort(_(b'unclosed "batchbegin" request'))
4713 4711
4714 4712 if peer:
4715 4713 peer.close()
4716 4714
4717 4715 if proc:
4718 4716 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now