# Changeset r50623 (branch: default), author: marmoute
# "path: pass `path` to `peer` in `hg debugbackupbundle`"
# Diff context: @@ -1,4716 +1,4715
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 getattr,
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 bundlerepo,
42 42 changegroup,
43 43 cmdutil,
44 44 color,
45 45 context,
46 46 copies,
47 47 dagparser,
48 48 dirstateutils,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 repoview,
73 73 requirements,
74 74 revlog,
75 75 revset,
76 76 revsetlang,
77 77 scmutil,
78 78 setdiscovery,
79 79 simplemerge,
80 80 sshpeer,
81 81 sslutil,
82 82 streamclone,
83 83 strip,
84 84 tags as tagsmod,
85 85 templater,
86 86 treediscovery,
87 87 upgrade,
88 88 url as urlmod,
89 89 util,
90 90 vfs as vfsmod,
91 91 wireprotoframing,
92 92 wireprotoserver,
93 93 )
94 94 from .interfaces import repository
95 95 from .utils import (
96 96 cborutil,
97 97 compression,
98 98 dateutil,
99 99 procutil,
100 100 stringutil,
101 101 urlutil,
102 102 )
103 103
104 104 from .revlogutils import (
105 105 constants as revlog_constants,
106 106 debug as revlog_debug,
107 107 deltas as deltautil,
108 108 nodemap,
109 109 rewrite,
110 110 sidedata,
111 111 )
112 112
# Convenience alias so debug commands can release locks explicitly.
release = lockmod.release

# Command table for all debug* commands.  It is seeded with the commands
# registered by the strip module so both share a single registration point.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
118 118
119 119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs not in (2, 3):
        raise error.Abort(_(b'either two or three arguments required'))
    if nargs == 3:
        # an explicit index file was given: open it as a standalone revlog
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    else:
        # no index file: fall back to the changelog of the current repository
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    ancestor = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancestor), hex(ancestor)))
139 139
140 140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout this file (e.g. repo.vfs.write of
    # b"localtags" elsewhere); the filename was previously a str while the
    # mode was bytes, which is inconsistent — use bytes for both.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
156 156
157 157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # open the bundle at the given path and replay it into the local repo
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
164 164
165 165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # second parse pass: actually build the commits, under lock + transaction
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' event: create a new commit with parents `ps`
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the "mf" file contents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # mark this revision's dedicated line so merges conflict
                    # predictably
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # carry over the second parent's nf* files so the
                        # merge does not delete them
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file contents from filecontent
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' event: record a local tag for node `id`
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' event: switch the named branch for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
349 349
350 350
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    # Pretty-print the contents of a changegroup unbundler `gen`.
    #
    # With all=True, every delta chunk of the changelog, manifest and
    # filelogs is listed (one line per delta); otherwise only the changelog
    # node ids are printed.  `indent` prefixes each output line with that
    # many spaces (used when nested inside bundle2 output).
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # dump every delta of the current chunk group under header `named`
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelog groups follow until an empty header terminates the stream
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        # bundle2 payloads must go through _debugbundle2 instead
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
390 390
391 391
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display the obsolescence-marker version and markers in a bundle part"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(data))
        )
        return
    ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
    fm = ui.formatter(b'debugobsolete', opts)
    for rawmarker in sorted(markers):
        fm.startitem()
        fm.plain(pad)
        cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
    fm.end()
414 414
415 415
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from 'data', one per line"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
424 424
425 425
def _quasirepr(thing):
    """return a repr-like bytes rendering with deterministic dict ordering"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
432 432
433 433
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        # honour --part-type filtering, if any was requested
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # the part types below are mutually exclusive
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
456 456
457 457
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec, nothing else
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
480 480
481 481
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # query capabilities before printing the header, so a failing peer
        # produces no partial output
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        peer.close()
501 501
502 502
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    # gather the per-file change information, either recomputed or decoded
    # from the stored sidedata
    files = None
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        # classify the file; the first matching bucket wins
        for bucket, label in (
            (files.added, b"added"),
            (files.removed, b"removed"),
            (files.merged, b"merged"),
            (files.salvaged, b"salvaged"),
        ):
            if f in bucket:
                action = label
                break
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
552 552
553 553
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1, m2 = repo[parent1].manifest(), repo[parent2].manifest()
    errcount = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
567 567
568 568
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
581 581
582 582
def _debugdisplaycolor(ui):
    """print every known color name, each rendered as its own label"""
    # work on a copy so the caller's style table is untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
599 599
600 600
def _debugdisplaystyle(ui):
    """print the configured styles, each rendered with its own effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # pad so the effect lists line up in one column
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
614 614
615 615
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # renamed from `requirements` to avoid shadowing the module-level
    # `requirements` import
    bundle_reqs, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(bundle_reqs)))
637 637
638 638
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # operate on a standalone revlog index file
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield 'n' (node) events; the explicitly listed revisions also
            # get an 'l' (label) event naming them rN
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # operate on the current repository's changelog
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map revision -> list of tag names, so labels can be emitted
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event when the branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
708 708
709 709
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    named_storage = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if named_storage:
        # with -c/-m/--dir the sole positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
725 725
726 726
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # NOTE(review): the decorator previously set both norepo=True and
    # optionalrepo=True; the function signature takes no `repo` parameter,
    # so the contradictory optionalrepo flag was removed.
    if opts["extended"]:
        # -e/--extended: also try the extended date format list
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
745 745
746 746
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``p1``:        parent 1 revision number (for reference)
    :``p2``:        parent 2 revision number (for reference)
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base:  a full snapshot
                    - snap:  an intermediate snapshot
                    - p1:    a delta against the first parent
                    - p2:    a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev:  a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``:   compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``:  total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Collect per-revision delta statistics from the index entry.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # classify the delta base; order matters (p1/p2 before skip1/2)
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without generaldelta the base is either the rev itself or prev
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev      p1      p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # delta chains are numbered in discovery order, keyed by base revision
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate the sparse read and report how much data it touches
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
987 987
988 988
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # with a single positional argument it is the revision; with two, the
    # first is the file and the second the revision
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # renamed from `revlog` to avoid shadowing the module-level `revlog`
    # import.  NOTE(review): the b'debugdeltachain' label looks copied from
    # debugdeltachain — confirm whether it should be b'debug-delta-find'.
    rl = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    p1r, p2r = rl.parentrevs(rev)

    # map --source to the revision used as the starting delta base
    if source == b'full':
        base_rev = nullrev
    elif source == b'storage':
        base_rev = rl.deltaparent(rev)
    elif source == b'p1':
        base_rev = p1r
    elif source == b'p2':
        base_rev = p2r
    elif source == b'prev':
        base_rev = rev - 1
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, rl, rev, base_rev=base_rev)
1046 1046
1047 1047
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 docket (metadata file) instead of
    # the entries, then stop
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        # unpack the fixed-layout tree-metadata blob stored in the docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is deprecated but still forces dates off when given
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort key: (saved mtime, filename)
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # mtime of -1 means the entry's time is not recorded ("unset")
        if mtime == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit in the stored mode
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1135 1135
1136 1136
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # only dirstate-v2 records an ignore-pattern hash (at the end of the
    # docket's tree metadata); for v1 this command prints nothing
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20 # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1151 1151
1152 1152
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to a real (or local) peer
        path = urlutil.get_unique_pull_path_obj(
            b'debugdiscovery', ui, remoteurl
        )
        branches = (path.branch, [])
        remote = hg.peer(repo, opts, path)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
    else:
        # --remote-as-revs: use the local repo, filtered down, as the remote
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: filter the local repository the same way
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # `_anyinc` is unused here; avoid shadowing the builtin `any`
            common, _anyinc, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    # `all_revs` avoids shadowing the builtin `all`
    all_revs = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all_revs)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all_revs)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1434 1434
1435 1435
# read/write buffer size (4 KiB) used by `hg debugdownload` below
_chunksize = 4 << 10
1437 1437
1438 1438
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    fh = urlmod.open(ui, url, output)

    # write either to the requested output file or straight to the ui
    if output:
        dest = open(output, b"wb", _chunksize)
    else:
        dest = ui
    try:
        # stream the resource in fixed-size chunks until exhausted
        while True:
            data = fh.read(_chunksize)
            if not data:
                break
            dest.write(data)
    finally:
        if output:
            dest.close()
1461 1461
1462 1462
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; oxidized builds have no __file__,
        # so fall back to the executable path
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with the extension's tested-with status
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
            fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1524 1524
1525 1525
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the expression is pushed through these stages in order; any of them
    # can be printed with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate files the computed matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print every candidate file the matcher accepts
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1621 1621
1622 1622
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # building a report and repairing (or dry-running) are mutually exclusive
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # issue6528 only affects revlogv1 repositories
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the heavy lifting lives in the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1695 1695
1696 1696
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # the name column is sized by the longest format-variant name
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad every name to the shared column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # render booleans as yes/no, but pass byte strings through
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # choose labels that highlight repo/config/default mismatches
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1767 1767
1768 1768
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def fmt_bool(value):
        # render a probed capability as b'yes' / b'no'
        return b'yes' if value else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % fmt_bool(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % fmt_bool(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % fmt_bool(util.checknlink(path)))
    # probing case sensitivity needs a temporary file; on failure (e.g. a
    # read-only path) the answer stays b'(unknown)'
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = fmt_bool(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1791 1791
1792 1792
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # assemble the keyword arguments for the wire-protocol getbundle() call
    # TODO: get desired bundlecaps from command line.
    kwargs = {'bundlecaps': None}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    bundle = repo.getbundle(b'debug', **kwargs)

    # map the user-facing --type name to the on-disk bundle header
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1839 1839
1840 1840
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # test the file itself first, then each parent directory
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1889 1889
1890 1890
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # some storage objects wrap the actual revlog in a `_revlog`
    # attribute; fall back to the store itself when they do not
    # (bytes attribute name works because `getattr` here is the
    # pycompat wrapper imported at the top of the file)
    revlog = getattr(store, b'_revlog', store)

    # actual index rendering lives in the revlog debug helpers
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=revlog,
        full_node=ui.debugflag,
    )
1912 1912
1913 1913
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    # emit one graphviz edge per parent link; the null second parent is
    # skipped so linear history renders as a simple chain
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1932 1932
1933 1933
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # touch the index so it is fully populated before we probe it
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1943 1943
1944 1944
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # running count of detected problems; also the command's return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer) builds have no separate lib directory; point
        # at the executable instead
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # derive which accelerated module families the policy implies
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # the editor setting may contain arguments; only resolve the binary
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2243 2243
2244 2244
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    nodes = [bin(hexid) for hexid in ids]
    # render one '1'/'0' character per queried node
    bits = [b"1" if known else b"0" for known in peer.known(nodes)]
    ui.write(b"%s\n" % b"".join(bits))
2258 2258
2259 2259
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias: all logic lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2264 2264
2265 2265
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: unconditionally delete the lock files and exit
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire non-blockingly, then hold until
    # interrupted; the finally clause guarantees release on any exit path
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True: # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    # default mode: report the current state of both locks
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody else held it; drop it again
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # lock contents are "host:pid"
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # lock file vanished between the failed acquire and here
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2388 2388
2389 2389
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # fetch the fulltext cache from the root manifest storage; not all
        # revlog implementations have one
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read() # reading stores the revision in the cache too
            return

    # no action requested: display the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2463 2463
2464 2464
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default template mirrors the nested formatter structure built below
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            # per-file merge record; state[0] is the record type and
            # selects how the remaining positional fields are interpreted
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2572 2572
2573 2573
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # branch names get special treatment below so that only open
    # branches are offered as completions
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # no prefix means "complete everything"
    prefixes = args if args else [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(c for c in candidates if c.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2596 2596
2597 2597
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # the flags below are mutually exclusive inspection modes
    if opts['dump_new']:
        # serialize a fresh nodemap from the in-memory index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # emit the raw persisted nodemap bytes, if any exist on disk
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # validate the on-disk data against the current index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # print the docket (header) fields of the persisted nodemap
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2659 2659
2660 2660
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete: remove markers by index and return early
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # lock and transaction are released in reverse acquisition order
        # via the nested finally clauses below
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally narrowed to --rev
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2810 2810
2811 2811
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # entries map destination -> source; render as "source -> destination"
    for dest, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2824 2824
2825 2825
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # entries map destination -> source; render as "source -> destination"
    for dest, source in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2838 2838
2839 2839
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completion candidates for ``path``,
        # restricted to dirstate entries whose state byte is in
        # ``acceptable``.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Spec points outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make spec relative to the repo root.
        spec = spec[len(rootdir) :]
        # dirstate stores paths with '/'; translate when the OS separator
        # differs (e.g. Windows).
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path-segment boundary.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the filter flags.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # No filter flags at all means accept every state.
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2908 2908
2909 2909
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(ctx1, pats, opts)
    # Sort by destination so the output is deterministic.
    copymap = copies.pathcopies(ctx1, ctx2, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2923 2923
2924 2924
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        # Always release the connection, even if a query above fails.
        peer.close()
2948 2948
2949 2949
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool is modeled as a temporary ui.forcemerge override so the
        # normal tool-selection machinery picks it up.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress _picktool's chatter unless --debug was given.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3034 3034
3035 3035
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    remote = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Five-argument form: conditionally update a single key.
            key, old, new = keyinfo
            with remote.commandexecutor() as executor:
                result = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            return not result
        else:
            # Two-argument form: dump every key in the namespace.
            for key, value in sorted(remote.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
    finally:
        remote.close()
3071 3071
3072 3072
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display parent-vector data for two revisions

    Prints both pvecs, their depths, the depth delta, the hamming
    distance between their vectors, their distance, and the relation
    between them (``=``, ``>``, ``<``, ``|``, or ``?`` when none of the
    comparisons hold).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously ``rel`` was left unbound (NameError at the write
        # below) when none of the comparisons matched; report an
        # unknown relation instead of crashing.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3099 3099
3100 3100
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files in the manifest but unknown to the dirstate.
            manifestonly = manifestfiles - dirstatefiles
            # Dirstate entries absent from the manifest; keep "added"
            # entries untouched.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3148 3148
3149 3149
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    opts = pycompat.byteskwargs(opts)
    # --only-data restricts the scan to data (.d) files.
    only_data = opts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3166 3166
3167 3167
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() returns (old path, old file node) or None/False.
        renameinfo = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if renameinfo:
            oldpath, oldnode = renameinfo
            ui.write(
                _(b"%s renamed from %s:%s\n") % (relpath, oldpath, hex(oldnode))
            )
        else:
            ui.write(_(b"%s not renamed\n") % relpath)
3187 3187
3188 3188
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # Sorted so the output is stable across runs.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3194 3194
3195 3195
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump emits raw index data; the default prints statistics.
    if opts.get(b"dump"):
        revlog_debug.dump(ui, rlog)
    else:
        revlog_debug.debug_revlog(ui, rlog)
    return 0
3212 3212
3213 3213
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Two output layouts are supported: 0 (legacy) and 1 (with flags).
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full 40-char hashes, otherwise short ones.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Probe the first node to learn the rendered hash width.
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the column header matching format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the index is damaged.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not nodes.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3327 3327
3328 3328
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset compilation pipeline: each stage transforms the tree
    # produced by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final optimization stage entirely.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Stages to print unconditionally vs. only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
        if opts[b'optimize']:
            showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff
        # the resulting revision lists.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Print a unified-diff-style report of the mismatch.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3460 3460
3461 3461
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive log destinations.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Blocks serving requests on stdio until the client disconnects.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3510 3510
3511 3511
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # rev2 defaults to the null revision, i.e. clears the second parent.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3539 3539
3540 3540
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir, the positional FILE argument is really the
    # revision and no file name is expected.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Unwrap filelog-style wrappers to reach the underlying revlog.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort by sidedata key for deterministic output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3567 3567
3568 3568
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12.
    # Build the equivalent unverified TLS client socket via an explicit
    # SSLContext (we only need the peer's raw certificate, not a
    # validated chain, so verification is intentionally disabled).
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # DER-encoded peer certificate, as required by the win32 helper.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3638 3638
3639 3639
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # SOURCE contained an unresolved diff here (old and new lines
    # interleaved); this is the resolved post-change version, which
    # passes a path object to hg.peer() and uses path.loc thereafter.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to ``limit`` changesets from the bundle, honouring
        # --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        path = urlutil.get_unique_pull_path_obj(
            b'debugbackupbundle',
            ui,
            source,
        )
        try:
            other = hg.peer(repo, opts, path)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % path.loc
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        branches = (path.branch, opts.get(b'branch', []))
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, path.loc)
                        gen = exchange.readbundle(ui, f, path.loc)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + path.loc,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
                        break
            else:
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(path.loc)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3781 3780
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the recorded subrepository state (path, source, pinned revision)
    # for the given revision, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for subpath, substate in sorted(ctx.substate.items()):
        source, revision = substate[0], substate[1]
        ui.writenoi18n(b'path %s\n' % subpath)
        ui.writenoi18n(b' source %s\n' % source)
        ui.writenoi18n(b' revision %s\n' % revision)
3793 3792
3794 3793
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interactive namespace with the objects a debugging session
    # most commonly needs; repo may be None (optionalrepo=True).
    code.interact(local={'ui': ui, 'repo': repo})
3810 3809
3811 3810
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() invocations so repeated subgraphs are
    # only computed once per command run.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        allsets = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in allsets:
            # One line per successors set: each node prefixed by a space.
            rendered = b''.join(b' ' + short(node) for node in succsset)
            ui.write(rendered + b'\n')
3866 3865
3867 3866
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode is None:
            # No cached entry for this changeset.
            display = b'missing'
        elif not fnode:
            # Present but falsy (and not None): a corrupt cache entry.
            display = b'invalid'
        else:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3886 3885
3887 3886
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect user-supplied KEY=VALUE template keyword definitions.
    props = {}
    for definition in opts['define']:
        try:
            # split() raises ValueError when there is no b'=' separator.
            key, value = definition.split(b'=', 1)
            key = key.strip()
            value = value.strip()
            # b'ui' is reserved for the formatter's own resources.
            if not key or key == b'ui':
                raise ValueError
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % definition)
        props[key] = value

    if ui.verbose:
        tree = templater.parse(tmpl)
        aliases = ui.configitems(b'templatealias')
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    def _showsymbols(symbols):
        # Print the keywords and functions referenced by the template.
        kwds, funcs = symbols
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once with the default context.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            _showsymbols(t.symbolsuseddefault())
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            _showsymbols(displayer.t.symbolsuseddefault())
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3951 3950
3952 3951
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may yield None; substitute a visible placeholder.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
3967 3966
3968 3967
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
3981 3980
3982 3981
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the wlock and the store lock for the whole cache refresh.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
3988 3987
3989 3988
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before delegating.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4039 4038
4040 4039
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        f = lambda fn: util.normpath(fn)
    # Size the two columns to the widest repo-relative and cwd-relative
    # paths.  (The loop variable was previously named ``abs``, shadowing the
    # builtin; renamed.  Generator expressions avoid throwaway lists.)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            f(repo.pathto(fname)),
            b'exact' if m.exact(fname) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4067 4066
4068 4067
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent node as "<hex> (<phase>)", joined by
            # spaces, with a trailing space before the reason text.
            rendered = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4086 4085
4087 4086
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Connection-related options configure the peer; they are not
        # forwarded as wire arguments.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4118 4117
4119 4118
4120 4119 def _parsewirelangblocks(fh):
4121 4120 activeaction = None
4122 4121 blocklines = []
4123 4122 lastindent = 0
4124 4123
4125 4124 for line in fh:
4126 4125 line = line.rstrip()
4127 4126 if not line:
4128 4127 continue
4129 4128
4130 4129 if line.startswith(b'#'):
4131 4130 continue
4132 4131
4133 4132 if not line.startswith(b' '):
4134 4133 # New block. Flush previous one.
4135 4134 if activeaction:
4136 4135 yield activeaction, blocklines
4137 4136
4138 4137 activeaction = line
4139 4138 blocklines = []
4140 4139 lastindent = 0
4141 4140 continue
4142 4141
4143 4142 # Else we start with an indent.
4144 4143
4145 4144 if not activeaction:
4146 4145 raise error.Abort(_(b'indented line outside of block'))
4147 4146
4148 4147 indent = len(line) - len(line.lstrip())
4149 4148
4150 4149 # If this line is indented more than the last line, concatenate it.
4151 4150 if indent > lastindent and blocklines:
4152 4151 blocklines[-1] += line.lstrip()
4153 4152 else:
4154 4153 blocklines.append(line)
4155 4154 lastindent = indent
4156 4155
4157 4156 # Flush last block.
4158 4157 if activeaction:
4159 4158 yield activeaction, blocklines
4160 4159
4161 4160
4162 4161 @command(
4163 4162 b'debugwireproto',
4164 4163 [
4165 4164 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4166 4165 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4167 4166 (
4168 4167 b'',
4169 4168 b'noreadstderr',
4170 4169 False,
4171 4170 _(b'do not read from stderr of the remote'),
4172 4171 ),
4173 4172 (
4174 4173 b'',
4175 4174 b'nologhandshake',
4176 4175 False,
4177 4176 _(b'do not log I/O related to the peer handshake'),
4178 4177 ),
4179 4178 ]
4180 4179 + cmdutil.remoteopts,
4181 4180 _(b'[PATH]'),
4182 4181 optionalrepo=True,
4183 4182 )
4184 4183 def debugwireproto(ui, repo, path=None, **opts):
4185 4184 """send wire protocol commands to a server
4186 4185
4187 4186 This command can be used to issue wire protocol commands to remote
4188 4187 peers and to debug the raw data being exchanged.
4189 4188
4190 4189 ``--localssh`` will start an SSH server against the current repository
4191 4190 and connect to that. By default, the connection will perform a handshake
4192 4191 and establish an appropriate peer instance.
4193 4192
4194 4193 ``--peer`` can be used to bypass the handshake protocol and construct a
4195 4194 peer instance using the specified class type. Valid values are ``raw``,
4196 4195 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4197 4196 don't support higher-level command actions.
4198 4197
4199 4198 ``--noreadstderr`` can be used to disable automatic reading from stderr
4200 4199 of the peer (for SSH connections only). Disabling automatic reading of
4201 4200 stderr is useful for making output more deterministic.
4202 4201
4203 4202 Commands are issued via a mini language which is specified via stdin.
4204 4203 The language consists of individual actions to perform. An action is
4205 4204 defined by a block. A block is defined as a line with no leading
4206 4205 space followed by 0 or more lines with leading space. Blocks are
4207 4206 effectively a high-level command with additional metadata.
4208 4207
4209 4208 Lines beginning with ``#`` are ignored.
4210 4209
4211 4210 The following sections denote available actions.
4212 4211
4213 4212 raw
4214 4213 ---
4215 4214
4216 4215 Send raw data to the server.
4217 4216
4218 4217 The block payload contains the raw data to send as one atomic send
4219 4218 operation. The data may not actually be delivered in a single system
4220 4219 call: it depends on the abilities of the transport being used.
4221 4220
4222 4221 Each line in the block is de-indented and concatenated. Then, that
4223 4222 value is evaluated as a Python b'' literal. This allows the use of
4224 4223 backslash escaping, etc.
4225 4224
4226 4225 raw+
4227 4226 ----
4228 4227
4229 4228 Behaves like ``raw`` except flushes output afterwards.
4230 4229
4231 4230 command <X>
4232 4231 -----------
4233 4232
4234 4233 Send a request to run a named command, whose name follows the ``command``
4235 4234 string.
4236 4235
4237 4236 Arguments to the command are defined as lines in this block. The format of
4238 4237 each line is ``<key> <value>``. e.g.::
4239 4238
4240 4239 command listkeys
4241 4240 namespace bookmarks
4242 4241
4243 4242 If the value begins with ``eval:``, it will be interpreted as a Python
4244 4243 literal expression. Otherwise values are interpreted as Python b'' literals.
4245 4244 This allows sending complex types and encoding special byte sequences via
4246 4245 backslash escaping.
4247 4246
4248 4247 The following arguments have special meaning:
4249 4248
4250 4249 ``PUSHFILE``
4251 4250 When defined, the *push* mechanism of the peer will be used instead
4252 4251 of the static request-response mechanism and the content of the
4253 4252 file specified in the value of this argument will be sent as the
4254 4253 command payload.
4255 4254
4256 4255 This can be used to submit a local bundle file to the remote.
4257 4256
4258 4257 batchbegin
4259 4258 ----------
4260 4259
4261 4260 Instruct the peer to begin a batched send.
4262 4261
4263 4262 All ``command`` blocks are queued for execution until the next
4264 4263 ``batchsubmit`` block.
4265 4264
4266 4265 batchsubmit
4267 4266 -----------
4268 4267
4269 4268 Submit previously queued ``command`` blocks as a batch request.
4270 4269
4271 4270 This action MUST be paired with a ``batchbegin`` action.
4272 4271
4273 4272 httprequest <method> <path>
4274 4273 ---------------------------
4275 4274
4276 4275 (HTTP peer only)
4277 4276
4278 4277 Send an HTTP request to the peer.
4279 4278
4280 4279 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4281 4280
4282 4281 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4283 4282 headers to add to the request. e.g. ``Accept: foo``.
4284 4283
4285 4284 The following arguments are special:
4286 4285
4287 4286 ``BODYFILE``
4288 4287 The content of the file defined as the value to this argument will be
4289 4288 transferred verbatim as the HTTP request body.
4290 4289
4291 4290 ``frame <type> <flags> <payload>``
4292 4291 Send a unified protocol frame as part of the request body.
4293 4292
4294 4293 All frames will be collected and sent as the body to the HTTP
4295 4294 request.
4296 4295
4297 4296 close
4298 4297 -----
4299 4298
4300 4299 Close the connection to the server.
4301 4300
4302 4301 flush
4303 4302 -----
4304 4303
4305 4304 Flush data written to the server.
4306 4305
4307 4306 readavailable
4308 4307 -------------
4309 4308
4310 4309 Close the write end of the connection and read all available data from
4311 4310 the server.
4312 4311
4313 4312 If the connection to the server encompasses multiple pipes, we poll both
4314 4313 pipes and read available data.
4315 4314
4316 4315 readline
4317 4316 --------
4318 4317
4319 4318 Read a line of output from the server. If there are multiple output
4320 4319 pipes, reads only the main pipe.
4321 4320
4322 4321 ereadline
4323 4322 ---------
4324 4323
4325 4324 Like ``readline``, but read from the stderr pipe, if available.
4326 4325
4327 4326 read <X>
4328 4327 --------
4329 4328
4330 4329 ``read()`` N bytes from the server's main output pipe.
4331 4330
4332 4331 eread <X>
4333 4332 ---------
4334 4333
4335 4334 ``read()`` N bytes from the server's stderr pipe, if available.
4336 4335
4337 4336 Specifying Unified Frame-Based Protocol Frames
4338 4337 ----------------------------------------------
4339 4338
4340 4339 It is possible to emit a *Unified Frame-Based Protocol* by using special
4341 4340 syntax.
4342 4341
4343 4342 A frame is composed as a type, flags, and payload. These can be parsed
4344 4343 from a string of the form:
4345 4344
4346 4345 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4347 4346
4348 4347 ``request-id`` and ``stream-id`` are integers defining the request and
4349 4348 stream identifiers.
4350 4349
4351 4350 ``type`` can be an integer value for the frame type or the string name
4352 4351 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4353 4352 ``command-name``.
4354 4353
4355 4354 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4356 4355 components. Each component (and there can be just one) can be an integer
4357 4356 or a flag name for stream flags or frame flags, respectively. Values are
4358 4357 resolved to integers and then bitwise OR'd together.
4359 4358
4360 4359 ``payload`` represents the raw frame payload. If it begins with
4361 4360 ``cbor:``, the following string is evaluated as Python code and the
4362 4361 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4363 4362 as a Python byte string literal.
4364 4363 """
4365 4364 opts = pycompat.byteskwargs(opts)
4366 4365
4367 4366 if opts[b'localssh'] and not repo:
4368 4367 raise error.Abort(_(b'--localssh requires a repository'))
4369 4368
4370 4369 if opts[b'peer'] and opts[b'peer'] not in (
4371 4370 b'raw',
4372 4371 b'ssh1',
4373 4372 ):
4374 4373 raise error.Abort(
4375 4374 _(b'invalid value for --peer'),
4376 4375 hint=_(b'valid values are "raw" and "ssh1"'),
4377 4376 )
4378 4377
4379 4378 if path and opts[b'localssh']:
4380 4379 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4381 4380
4382 4381 if ui.interactive():
4383 4382 ui.write(_(b'(waiting for commands on stdin)\n'))
4384 4383
4385 4384 blocks = list(_parsewirelangblocks(ui.fin))
4386 4385
4387 4386 proc = None
4388 4387 stdin = None
4389 4388 stdout = None
4390 4389 stderr = None
4391 4390 opener = None
4392 4391
4393 4392 if opts[b'localssh']:
4394 4393 # We start the SSH server in its own process so there is process
4395 4394 # separation. This prevents a whole class of potential bugs around
4396 4395 # shared state from interfering with server operation.
4397 4396 args = procutil.hgcmd() + [
4398 4397 b'-R',
4399 4398 repo.root,
4400 4399 b'debugserve',
4401 4400 b'--sshstdio',
4402 4401 ]
4403 4402 proc = subprocess.Popen(
4404 4403 pycompat.rapply(procutil.tonativestr, args),
4405 4404 stdin=subprocess.PIPE,
4406 4405 stdout=subprocess.PIPE,
4407 4406 stderr=subprocess.PIPE,
4408 4407 bufsize=0,
4409 4408 )
4410 4409
4411 4410 stdin = proc.stdin
4412 4411 stdout = proc.stdout
4413 4412 stderr = proc.stderr
4414 4413
4415 4414 # We turn the pipes into observers so we can log I/O.
4416 4415 if ui.verbose or opts[b'peer'] == b'raw':
4417 4416 stdin = util.makeloggingfileobject(
4418 4417 ui, proc.stdin, b'i', logdata=True
4419 4418 )
4420 4419 stdout = util.makeloggingfileobject(
4421 4420 ui, proc.stdout, b'o', logdata=True
4422 4421 )
4423 4422 stderr = util.makeloggingfileobject(
4424 4423 ui, proc.stderr, b'e', logdata=True
4425 4424 )
4426 4425
4427 4426 # --localssh also implies the peer connection settings.
4428 4427
4429 4428 url = b'ssh://localserver'
4430 4429 autoreadstderr = not opts[b'noreadstderr']
4431 4430
4432 4431 if opts[b'peer'] == b'ssh1':
4433 4432 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4434 4433 peer = sshpeer.sshv1peer(
4435 4434 ui,
4436 4435 url,
4437 4436 proc,
4438 4437 stdin,
4439 4438 stdout,
4440 4439 stderr,
4441 4440 None,
4442 4441 autoreadstderr=autoreadstderr,
4443 4442 )
4444 4443 elif opts[b'peer'] == b'raw':
4445 4444 ui.write(_(b'using raw connection to peer\n'))
4446 4445 peer = None
4447 4446 else:
4448 4447 ui.write(_(b'creating ssh peer from handshake results\n'))
4449 4448 peer = sshpeer.makepeer(
4450 4449 ui,
4451 4450 url,
4452 4451 proc,
4453 4452 stdin,
4454 4453 stdout,
4455 4454 stderr,
4456 4455 autoreadstderr=autoreadstderr,
4457 4456 )
4458 4457
4459 4458 elif path:
4460 4459 # We bypass hg.peer() so we can proxy the sockets.
4461 4460 # TODO consider not doing this because we skip
4462 4461 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4463 4462 u = urlutil.url(path)
4464 4463 if u.scheme != b'http':
4465 4464 raise error.Abort(_(b'only http:// paths are currently supported'))
4466 4465
4467 4466 url, authinfo = u.authinfo()
4468 4467 openerargs = {
4469 4468 'useragent': b'Mercurial debugwireproto',
4470 4469 }
4471 4470
4472 4471 # Turn pipes/sockets into observers so we can log I/O.
4473 4472 if ui.verbose:
4474 4473 openerargs.update(
4475 4474 {
4476 4475 'loggingfh': ui,
4477 4476 'loggingname': b's',
4478 4477 'loggingopts': {
4479 4478 'logdata': True,
4480 4479 'logdataapis': False,
4481 4480 },
4482 4481 }
4483 4482 )
4484 4483
4485 4484 if ui.debugflag:
4486 4485 openerargs['loggingopts']['logdataapis'] = True
4487 4486
4488 4487 # Don't send default headers when in raw mode. This allows us to
4489 4488 # bypass most of the behavior of our URL handling code so we can
4490 4489 # have near complete control over what's sent on the wire.
4491 4490 if opts[b'peer'] == b'raw':
4492 4491 openerargs['sendaccept'] = False
4493 4492
4494 4493 opener = urlmod.opener(ui, authinfo, **openerargs)
4495 4494
4496 4495 if opts[b'peer'] == b'raw':
4497 4496 ui.write(_(b'using raw connection to peer\n'))
4498 4497 peer = None
4499 4498 elif opts[b'peer']:
4500 4499 raise error.Abort(
4501 4500 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4502 4501 )
4503 4502 else:
4504 4503 peer = httppeer.makepeer(ui, path, opener=opener)
4505 4504
4506 4505 # We /could/ populate stdin/stdout with sock.makefile()...
4507 4506 else:
4508 4507 raise error.Abort(_(b'unsupported connection configuration'))
4509 4508
4510 4509 batchedcommands = None
4511 4510
4512 4511 # Now perform actions based on the parsed wire language instructions.
4513 4512 for action, lines in blocks:
4514 4513 if action in (b'raw', b'raw+'):
4515 4514 if not stdin:
4516 4515 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4517 4516
4518 4517 # Concatenate the data together.
4519 4518 data = b''.join(l.lstrip() for l in lines)
4520 4519 data = stringutil.unescapestr(data)
4521 4520 stdin.write(data)
4522 4521
4523 4522 if action == b'raw+':
4524 4523 stdin.flush()
4525 4524 elif action == b'flush':
4526 4525 if not stdin:
4527 4526 raise error.Abort(_(b'cannot call flush on this peer'))
4528 4527 stdin.flush()
4529 4528 elif action.startswith(b'command'):
4530 4529 if not peer:
4531 4530 raise error.Abort(
4532 4531 _(
4533 4532 b'cannot send commands unless peer instance '
4534 4533 b'is available'
4535 4534 )
4536 4535 )
4537 4536
4538 4537 command = action.split(b' ', 1)[1]
4539 4538
4540 4539 args = {}
4541 4540 for line in lines:
4542 4541 # We need to allow empty values.
4543 4542 fields = line.lstrip().split(b' ', 1)
4544 4543 if len(fields) == 1:
4545 4544 key = fields[0]
4546 4545 value = b''
4547 4546 else:
4548 4547 key, value = fields
4549 4548
4550 4549 if value.startswith(b'eval:'):
4551 4550 value = stringutil.evalpythonliteral(value[5:])
4552 4551 else:
4553 4552 value = stringutil.unescapestr(value)
4554 4553
4555 4554 args[key] = value
4556 4555
4557 4556 if batchedcommands is not None:
4558 4557 batchedcommands.append((command, args))
4559 4558 continue
4560 4559
4561 4560 ui.status(_(b'sending %s command\n') % command)
4562 4561
4563 4562 if b'PUSHFILE' in args:
4564 4563 with open(args[b'PUSHFILE'], 'rb') as fh:
4565 4564 del args[b'PUSHFILE']
4566 4565 res, output = peer._callpush(
4567 4566 command, fh, **pycompat.strkwargs(args)
4568 4567 )
4569 4568 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4570 4569 ui.status(
4571 4570 _(b'remote output: %s\n') % stringutil.escapestr(output)
4572 4571 )
4573 4572 else:
4574 4573 with peer.commandexecutor() as e:
4575 4574 res = e.callcommand(command, args).result()
4576 4575
4577 4576 ui.status(
4578 4577 _(b'response: %s\n')
4579 4578 % stringutil.pprint(res, bprefix=True, indent=2)
4580 4579 )
4581 4580
4582 4581 elif action == b'batchbegin':
4583 4582 if batchedcommands is not None:
4584 4583 raise error.Abort(_(b'nested batchbegin not allowed'))
4585 4584
4586 4585 batchedcommands = []
4587 4586 elif action == b'batchsubmit':
4588 4587 # There is a batching API we could go through. But it would be
4589 4588 # difficult to normalize requests into function calls. It is easier
4590 4589 # to bypass this layer and normalize to commands + args.
4591 4590 ui.status(
4592 4591 _(b'sending batch with %d sub-commands\n')
4593 4592 % len(batchedcommands)
4594 4593 )
4595 4594 assert peer is not None
4596 4595 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4597 4596 ui.status(
4598 4597 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4599 4598 )
4600 4599
4601 4600 batchedcommands = None
4602 4601
4603 4602 elif action.startswith(b'httprequest '):
4604 4603 if not opener:
4605 4604 raise error.Abort(
4606 4605 _(b'cannot use httprequest without an HTTP peer')
4607 4606 )
4608 4607
4609 4608 request = action.split(b' ', 2)
4610 4609 if len(request) != 3:
4611 4610 raise error.Abort(
4612 4611 _(
4613 4612 b'invalid httprequest: expected format is '
4614 4613 b'"httprequest <method> <path>'
4615 4614 )
4616 4615 )
4617 4616
4618 4617 method, httppath = request[1:]
4619 4618 headers = {}
4620 4619 body = None
4621 4620 frames = []
4622 4621 for line in lines:
4623 4622 line = line.lstrip()
4624 4623 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4625 4624 if m:
4626 4625 # Headers need to use native strings.
4627 4626 key = pycompat.strurl(m.group(1))
4628 4627 value = pycompat.strurl(m.group(2))
4629 4628 headers[key] = value
4630 4629 continue
4631 4630
4632 4631 if line.startswith(b'BODYFILE '):
4633 4632 with open(line.split(b' ', 1), b'rb') as fh:
4634 4633 body = fh.read()
4635 4634 elif line.startswith(b'frame '):
4636 4635 frame = wireprotoframing.makeframefromhumanstring(
4637 4636 line[len(b'frame ') :]
4638 4637 )
4639 4638
4640 4639 frames.append(frame)
4641 4640 else:
4642 4641 raise error.Abort(
4643 4642 _(b'unknown argument to httprequest: %s') % line
4644 4643 )
4645 4644
4646 4645 url = path + httppath
4647 4646
4648 4647 if frames:
4649 4648 body = b''.join(bytes(f) for f in frames)
4650 4649
4651 4650 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4652 4651
4653 4652 # urllib.Request insists on using has_data() as a proxy for
4654 4653 # determining the request method. Override that to use our
4655 4654 # explicitly requested method.
4656 4655 req.get_method = lambda: pycompat.sysstr(method)
4657 4656
4658 4657 try:
4659 4658 res = opener.open(req)
4660 4659 body = res.read()
4661 4660 except util.urlerr.urlerror as e:
4662 4661 # read() method must be called, but only exists in Python 2
4663 4662 getattr(e, 'read', lambda: None)()
4664 4663 continue
4665 4664
4666 4665 ct = res.headers.get('Content-Type')
4667 4666 if ct == 'application/mercurial-cbor':
4668 4667 ui.write(
4669 4668 _(b'cbor> %s\n')
4670 4669 % stringutil.pprint(
4671 4670 cborutil.decodeall(body), bprefix=True, indent=2
4672 4671 )
4673 4672 )
4674 4673
4675 4674 elif action == b'close':
4676 4675 assert peer is not None
4677 4676 peer.close()
4678 4677 elif action == b'readavailable':
4679 4678 if not stdout or not stderr:
4680 4679 raise error.Abort(
4681 4680 _(b'readavailable not available on this peer')
4682 4681 )
4683 4682
4684 4683 stdin.close()
4685 4684 stdout.read()
4686 4685 stderr.read()
4687 4686
4688 4687 elif action == b'readline':
4689 4688 if not stdout:
4690 4689 raise error.Abort(_(b'readline not available on this peer'))
4691 4690 stdout.readline()
4692 4691 elif action == b'ereadline':
4693 4692 if not stderr:
4694 4693 raise error.Abort(_(b'ereadline not available on this peer'))
4695 4694 stderr.readline()
4696 4695 elif action.startswith(b'read '):
4697 4696 count = int(action.split(b' ', 1)[1])
4698 4697 if not stdout:
4699 4698 raise error.Abort(_(b'read not available on this peer'))
4700 4699 stdout.read(count)
4701 4700 elif action.startswith(b'eread '):
4702 4701 count = int(action.split(b' ', 1)[1])
4703 4702 if not stderr:
4704 4703 raise error.Abort(_(b'eread not available on this peer'))
4705 4704 stderr.read(count)
4706 4705 else:
4707 4706 raise error.Abort(_(b'unknown action: %s') % action)
4708 4707
4709 4708 if batchedcommands is not None:
4710 4709 raise error.Abort(_(b'unclosed "batchbegin" request'))
4711 4710
4712 4711 if peer:
4713 4712 peer.close()
4714 4713
4715 4714 if proc:
4716 4715 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now