##// END OF EJS Templates
debug-discovery: fix a typo in the doc...
marmoute -
r50294:a3fdc4fc stable
parent child Browse files
Show More
@@ -1,5031 +1,5031 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revlogutils,
77 77 revset,
78 78 revsetlang,
79 79 scmutil,
80 80 setdiscovery,
81 81 simplemerge,
82 82 sshpeer,
83 83 sslutil,
84 84 streamclone,
85 85 strip,
86 86 tags as tagsmod,
87 87 templater,
88 88 treediscovery,
89 89 upgrade,
90 90 url as urlmod,
91 91 util,
92 92 vfs as vfsmod,
93 93 wireprotoframing,
94 94 wireprotoserver,
95 95 )
96 96 from .interfaces import repository
97 97 from .utils import (
98 98 cborutil,
99 99 compression,
100 100 dateutil,
101 101 procutil,
102 102 stringutil,
103 103 urlutil,
104 104 )
105 105
106 106 from .revlogutils import (
107 107 constants as revlog_constants,
108 108 debug as revlog_debug,
109 109 deltas as deltautil,
110 110 nodemap,
111 111 rewrite,
112 112 sidedata,
113 113 )
114 114
115 115 release = lockmod.release
116 116
117 117 table = {}
118 118 table.update(strip.command._table)
119 119 command = registrar.command(table)
120 120
121 121
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # explicit index file given: open it as a standalone revlog
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = store.lookup
    elif nargs == 2:
        # no index file: fall back to the changelog of the current repo
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(ancestor), hex(ancestor)))
141 141
142 142
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Write the well-known EICAR test signature into the repo's cache
    # directory; a resident antivirus engine is expected to flag (and
    # typically quarantine) it, which makes its presence observable.
    # NOTE(review): the path is a native str while the mode is bytes —
    # presumably the vfs layer accepts both; confirm against vfs.__call__.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    # Best-effort cleanup; the scanner may already have removed the file.
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
158 158
159 159
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle, parse its header, then replay it into the local repo.
    bundlefile = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, bundlefile, fname)
    bundle.apply(repo)
166 166
167 167
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # first pass over the parsed DAG: determine number of revs, so the
    # progress bar (and the mergeable-file size) can be sized up front
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # second pass: actually create one commit per 'n' event
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        # nodeids[i] holds the node created for DAG id i; backrefs index it
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge rev: three-way merge the file from both parents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # very first rev: start from the pre-built line list
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # merges must carry over the other parent's nf* files
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: content for files created above,
                    # None for any other path
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                # remember the id of the last rev actually committed
                at = id
            elif type == b'l':
                # local tag event: record it, written out in one go below
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
351 351
352 352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump a changegroup: full delta headers if 'all', else just node ids"""
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one section header followed by one line per delta
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # consume the stream in order: changelog, manifest, then filelogs
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelogheader() returns {} once the stream is exhausted
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        # terse mode: only the changelog node ids
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
392 392
393 393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # the payload uses a marker format this client does not know about
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
        fm.end()
416 416
417 417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data', one head per line"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
426 426
427 427
def _quasirepr(thing):
    """repr-like bytes rendering; dict-likes are sorted for stable output"""
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return b'{%s}' % b', '.join(pairs)
434 434
435 435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # --part-type limits output to the named part types (empty = all parts)
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # for known part types, additionally decode and dump the payload
        # (suppressed in --quiet mode)
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458 458
459 459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only report the bundlespec, do not dump the contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482 482
483 483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        # bundle2 capabilities are nested: one key, possibly many values
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # always release the peer connection, even if querying failed
        peer.close()
503 503
504 504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    # either recompute the file changes or decode them from sidedata storage
    files = None
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)

    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        # pick the most specific action recorded for this file
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
554 554
555 555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    # dirstate.verify yields (format, arg, ...) tuples, one per problem
    errcount = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
569 569
570 570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
583 583
584 584
def _debugdisplaycolor(ui):
    """list every available color name, each rendered with its own style"""
    ui = ui.copy()
    ui._styles.clear()
    # seed the style table so each effect is rendered with itself
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: 'color.*' and 'terminfo.*' config keys add entries
        prefixes = ((b'color.', 6), (b'terminfo.', 9))
        for key, _value in ui.configitems(b'color'):
            for prefix, size in prefixes:
                if key.startswith(prefix):
                    ui._styles[key] = key[size:]
                    break
    ui.write(_(b'available colors:\n'))

    # names containing '_' (e.g. '_background') sort after the others
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601 601
602 602
def _debugdisplaystyle(ui):
    """list the configured styles along with their rendered effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad the label column so the effect lists line up
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * max(0, width - len(label)))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616 616
617 617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        warning = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(warning)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqs = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqs)
639 639
640 640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone revlog index: emit its DAG, labeling the listed revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) per rev, plus 'l' label events
            # for the revs explicitly requested on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names so labels come out in rev order
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event whenever the named
                    # branch changes from the previous rev
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # serialize the event stream into the concise DAG text format
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
710 710
711 711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    storage_flags = (b'changelog', b'manifest', b'dir')
    if any(opts.get(flag) for flag in storage_flags):
        # with -c/-m/--dir the sole positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 727
728 728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # parsed is an (unixtime, offset) pair
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747 747
748 748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                      (when p1 has empty delta)
                    - skip2: a delta against the same base as p2
                      (when p2 has empty delta)
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # gather per-revision delta statistics straight from the index entry
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        if generaldelta:
            # classify the delta base; order matters: parents first, then
            # the "skip" cases, then snapshots and the legacy 'prev' form
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without general delta the base is either the rev itself
            # (full text) or the previous revision
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chains are numbered 1..n in order of first appearance of their base
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the reads the sparse-read code would issue for chain
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
989 989
990 990
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # with one positional argument it is the revision; with two, FILE then REV
    if arg_2 is None:
        file_, rev = None, arg_1
    else:
        file_, rev = arg_1, arg_2
    rev = int(rev)

    rl = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)

    # ask the delta computer to narrate every candidate it considers
    deltacomputer = deltautil.deltacomputer(
        rl,
        write_debug=ui.write,
        debug_search=True,
    )

    node = rl.node(rev)
    p1r, p2r = rl.parentrevs(rev)
    fulltext = [rl.revision(rev)]
    revinfo = revlogutils.revisioninfo(
        node,
        rl.node(p1r),
        rl.node(p2r),
        fulltext,
        len(fulltext[0]),
        None,  # no cached delta: start from the full text
        rl.flags(rev),
    )

    fh = rl._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1048 1048
1049 1049
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # note: opts is used with native str keys here (no byteskwargs call)
    if opts.get("docket"):
        # --docket only makes sense for dirstate-v2, which keeps its
        # metadata in a separate "docket" file
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        # unpack the fixed-layout tree metadata stored in the docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) overrides --dates when explicitly given
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by saved mtime first, then by filename for stability
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # mtime of -1 means the timestamp was never recorded ("unset")
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # render symlinks specially, otherwise show the permission bits
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1137 1137
1138 1138
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        # the ignore-pattern hash is the trailing field of the tree metadata
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1153 1153
1154 1154
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to a real (or configured) remote peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # use the local repository itself as "remote", restricted to the
        # requested revisions through a dedicated repoview filter
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # similarly restrict the local side when requested
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy, tree-walking discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern, sampling-based set discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, _any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: capture whatever the discovery prints so
        # it does not corrupt the structured stream
        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all_revs = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # common and missing must exactly partition the repository
    assert len(common) + len(missing) == len(all_revs)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all_revs)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"queries:               %(total-queries)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1409 1409
1410 1410
_chunksize = 4 << 10


@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    src = urlmod.open(ui, url, output)

    # stream into the requested file, or onto the ui when no output is given
    dest = open(output, b"wb", _chunksize) if output else ui
    try:
        while True:
            chunk = src.read(_chunksize)
            if not chunk:
                break
            dest.write(chunk)
    finally:
        if output:
            dest.close()
1436 1436
1437 1437
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (oxidized) build: modules live inside the executable
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # quiet/verbose: name on its own line; default: name plus a
        # compatibility annotation on the same line
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # show the most recent version the extension was tested with
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1499 1499
1500 1500
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset expression goes through these transformation stages in
    # order; --show-stage can dump the tree after any of them
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate files to run the matcher over
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print the candidates the fileset actually selects
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1596 1596
1597 1597
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # writing a report and applying/dry-running a repair are exclusive modes
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1670 1670
1671 1671
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # first column is as wide as the longest variant name, but never
    # narrower than the b'format-variant' header itself
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the name so every row's value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output shows booleans as yes/no, but passes values that
            # already look like strings (have startswith) through unchanged
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # label rows so mismatches between the repo's on-disk value, the
        # configured value and the Mercurial default stand out
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1742 1742
1743 1743
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(value):
        # render a boolean probe result the way this command always has
        return b'yes' if value else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probe case sensitivity with a throw-away file in the target path
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1766 1766
1767 1767
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # map the user-facing --type name to the on-disk bundle header
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1814 1814
1815 1815
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # the file may be ignored directly, or because one of its
                # parent directories matches an ignore rule
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file/line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1864 1864
1865 1865
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # some storage objects wrap an underlying revlog; unwrap when present
    target_revlog = getattr(store, b'_revlog', store)

    fm = ui.formatter(b'debugindex', opts)
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=target_revlog,
        full_node=ui.debugflag,
    )
1887 1887
1888 1888
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        # a second edge only exists for merge revisions
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1907 1907
1908 1908
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # NOTE(review): this lookup appears to exercise/prime the index before
    # stats are gathered — confirm why it is needed
    repo.changelog.shortest(repo.nullid, 1)
    idx = repo.changelog.index
    if not util.safehasattr(idx, b'stats'):
        # only native (non-pure-Python) index implementations expose stats()
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for name, value in sorted(idx.stats().items()):
        ui.write(b'%s: %d\n' % (name, value))
1918 1918
1919 1919
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # count of detected issues; also doubles as the command's return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # 'oxidized' is set on frozen builds (presumably PyOxidizer); there
        # is no os.__file__ there, so report the executable itself
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    # NOTE(review): the %-formatting below happens *before* _(), so the
    # already-formatted string is what gets looked up for translation —
    # confirm this is intended
    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen build: no module __file__, fall back to the executable
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # verify the compiled extension modules actually import
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the fallback default, so missing vi means "no editor set";
    # a missing non-vi editor is an explicit misconfiguration
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # extensions may provide their own debuginstall() hook
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2218 2218
2219 2219
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    nodes = [bin(hexid) for hexid in ids]
    # render each boolean answer as a '1' or '0' character
    result = b"".join(b"1" if known else b"0" for known in peer.known(nodes))
    ui.write(b"%s\n" % result)
2233 2233
2234 2234
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    """backwards compatibility with old bash completion scripts (DEPRECATED)"""
    # thin alias kept so old completion scripts continue to work
    debugnamecomplete(ui, repo, *args)
2239 2239
2240 2240
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # force-free: just remove the lock files; tryunlink tolerates a file
    # that is already gone
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # non-blocking acquisition: abort immediately if held
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # hold the lock(s) until the user (or a signal) tells us to stop
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Print the status of one lock file and return 1 if it is held,
        # 0 if it is free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we could acquire it, so nobody holds it: release and report free
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                # lock contents are "host:pid"; show the host only when the
                # lock was taken on another machine
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # the lock disappeared between the probe and the stat
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2363 2363
2364 2364
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # the fulltext cache is an implementation detail of revlog-based
        # manifest storage; other backends may not provide one
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # no action requested: display the current cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2438 2438
2439 2439
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable rendering; callers can override with -T
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged (local/other), with optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            # regular merge records carry full local/ancestor/other info
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            # path-conflict records carry only rename information
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2547 2547
2548 2548
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''
    candidates = set()
    # branches get special treatment below: historically only *open*
    # branches were listed, so skip the generic branch namespace here
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)
    # with no arguments, complete against the empty prefix (i.e. everything)
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2571 2571
2572 2572
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        changelog = repo.unfiltered().changelog
        # prefer the index's own serializer when the implementation
        # provides one; otherwise serialize from the generic code path
        if util.safehasattr(changelog.index, "nodemap_data_all"):
            payload = changelog.index.nodemap_data_all()
        else:
            payload = nodemap.persistent_data(changelog.index)
        ui.write(payload)
    elif opts['dump_disk']:
        changelog = repo.unfiltered().changelog
        on_disk = nodemap.persisted_data(changelog)
        if on_disk is not None:
            docket, payload = on_disk
            ui.write(payload[:])
    elif opts['check']:
        changelog = repo.unfiltered().changelog
        on_disk = nodemap.persisted_data(changelog)
        if on_disk is not None:
            docket, payload = on_disk
            return nodemap.check_data(ui, changelog.index, payload)
    elif opts['metadata']:
        changelog = repo.unfiltered().changelog
        on_disk = nodemap.persisted_data(changelog)
        if on_disk is not None:
            docket, payload = on_disk
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_pct = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_pct)
2634 2634
2635 2635
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full-length hex node id, raising InputError otherwise.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete: remove markers by index and return early
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker obsoleting `precursor` with the
        # given successors
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        # fixed grammar in the message ("cannot used" ->
                        # "cannot use")
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list (optionally filtered) markers
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2785 2785
2786 2786
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # one "source -> destination" line per recorded copy
    for dest, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2799 2799
2800 2800
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # one "source -> destination" line per recorded copy
    for dest, source in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2813 2813
2814 2814
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, restricted to
        # dirstate entries whose state letter is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # the spec points outside the repository: nothing to complete
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # strip the root prefix and convert os separators to '/' so the
        # prefix comparison matches the dirstate's path representation
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    # convert back to native separators for output
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, only complete up to the next separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate state letters from the options
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # with no filter options, accept every state letter
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2883 2883
2884 2884
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, restrict the copy trace to the given file
    # patterns, and emit "source -> destination" lines in sorted order.
    old = scmutil.revsingle(repo, rev1)
    new = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(old, pats, opts)
    copymap = copies.pathcopies(old, new, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2898 2898
2899 2899
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        def yesno(value):
            return _(b'yes') if value else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(islocal))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
    finally:
        # Always release the peer connection, even if a query failed.
        peer.close()
2923 2923
2924 2924
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool takes precedence; emulate it via the ui.forcemerge
        # override that the merge machinery honors.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress tool-selection chatter unless --debug is set;
            # by default only the final "FILE = TOOL" line is shown.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3009 3009
3010 3010
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Five-argument form: conditionally update a single key.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # Exit status 0 on success (r true), 1 on failure.
            return not r
        else:
            # Two-argument form: list every key in the namespace.
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
3046 3046
3047 3047
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent-vector ("pvec") encodings of two revisions

    Prints both pvecs, their depths, and the relation between them:
    ``=`` (equal), ``>``/``<`` (ancestor ordering), ``|`` (unrelated),
    or ``?`` when no relation could be determined.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously no branch matched here left ``rel`` unbound and the
        # final write crashed with a NameError; report "unknown" instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3074 3074
3075 3075
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            # The inconsistent set is: files only in the manifest, plus
            # dirstate-only files that were not explicitly added.
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        # changedfiles=None means "rebuild everything".
        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3123 3123
3124 3124
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Delegate the actual work to the repair module; --only-data limits
    # the scan to data (.d) files.
    opts = pycompat.byteskwargs(opts)
    only_data = opts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3141 3141
3142 3142
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    # For every matched file in the target revision, report where its
    # filelog says it was renamed from (if anywhere).
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
            continue
        srcpath, srcnode = renamed
        ui.write(
            _(b"%s renamed from %s:%s\n") % (relpath, srcpath, hex(srcnode))
        )
3162 3162
3163 3163
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3169 3169
3170 3170
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # Raw dump mode: one line of index data per revision, then exit.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in range(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # deltaparent() returns -1 for full snapshots; treat the
                # revision as its own delta base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the running set of head revisions: parents of the
            # current rev stop being heads, the rev itself becomes one.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold ``size`` into the (min, max, total) accumulator ``l``.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in range(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot (no delta parent): starts a new chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta revision: extend the chain of its delta parent.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Classify which base the delta was built against.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the stored chunk identifies the compression.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # Turn the accumulated totals into averages in slot [2].
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Format string for a plain count, width-matched to ``max``.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # Format string for "count (percent%)" output.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair; 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render the chunk-type label; printable ASCII letters are shown
        # alongside their hex form, everything else as hex only.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3525 3525
3526 3526
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full node hashes; default is the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Width of one rendered node id, taken from the first entry.
        idlen = len(shortfn(r.node(i)))
        break

    # Emit column headers matching the chosen format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if lookup fails.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3640 3640
3641 3641
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset processing pipeline, in order; each stage transforms
    # the tree produced by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Which stage trees get printed: always, or only when changed from
    # the previously printed tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree for later
    # inspection (needed by --verify-optimized).
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and print
        # a unified-diff-like comparison of the resulting revisions.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        # Exit status 1 signals that the optimized result differs.
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3773 3773
3774 3774
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are alternative I/O-log destinations.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Serve the SSH wire protocol over the process handles until the
    # client disconnects.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3823 3823
3824 3824
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of these people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions to binary nodes; rev2 falls back to the null
    # revision when omitted.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3852 3852
3853 3853
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir there is no FILE argument, so the single positional
    # argument is actually the revision; shuffle arguments accordingly.
    # (The command name passed to errors was previously the copy-pasted
    # b'debugdata'; report b'debugsidedata' so usage errors name the right
    # command.)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap to the underlying revlog when the storage object wraps one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Print entries sorted by key for deterministic output; --verbose
        # additionally dumps each entry's raw content.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3880 3880
3881 3881
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    # Without an explicit SOURCE, fall back to the repo's 'default' path.
    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only schemes with a well-known default port can be probed.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is deliberately disabled here: the point is
    # to *fetch* the peer certificate, not to validate it.
    # NOTE(review): ssl.wrap_socket() is deprecated since Python 3.7 and
    # removed in 3.12 -- confirm against the supported Python range.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First probe without building; only fall back to the chain
        # building (which may hit Windows Update) when incomplete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3953 3953
3954 3954
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the strip backup bundles, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Quiet down the incoming machinery while probing the bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle that contains the requested node,
                # then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: header line with the bundle's mtime, then
                # its changesets (one-line template unless --verbose).
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
4095 4095
4096 4096
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source and pinned revision) of
    # the requested revision, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4108 4108
4109 4109
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interactive namespace with the ui and the (possibly None)
    # repo objects.
    code.interact(local={'ui': ui, 'repo': repo})
4125 4125
4126 4126
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Memoization shared by all successorssets() calls in this invocation.
    cache = {}
    closest = opts['closest']
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=closest, cache=cache
        ):
            # One indented line per successors set; empty sets print a
            # bare newline.
            if succsset:
                ui.write(b' ' + b' '.join(short(n) for n in succsset))
            ui.write(b'\n')
4181 4181
4182 4182
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode is None:
            # No record at all for this revision in the cache.
            display = b'missing'
        elif not fnode:
            # Present but falsy record: a corrupt cache entry.
            display = b'invalid'
        else:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4201 4201
4202 4202
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse the -D KEY=VALUE definitions into template properties; the key
    # 'ui' is reserved and may not be redefined.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Dump the parsed tree, and again after template alias expansion
        # when expansion changed anything.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once against the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4266 4266
4267 4267
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may return None (e.g. on EOF); substitute a placeholder so
    # the response line is always printed.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4282 4282
4283 4283
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever the prompt machinery returned.
    ui.writenoi18n(b'response: %s\n' % ui.prompt(prompt))
4296 4296
4297 4297
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy and the store lock so every cache can be
    # rebuilt consistently.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4303 4303
4304 4304
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Thin wrapper: all the work happens in the upgrade module. Note that
    # the command framework supplies [] for --optimize, so set(optimize)
    # is safe in practice despite the None default.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4354 4354
4355 4355
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    matched = list(repo[None].walk(matcher))
    if not matched:
        return
    # Normalize separators for display when ui.slash is requested on a
    # platform whose native separator is not '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Column widths are sized to the longest repo-relative and
    # cwd-relative paths so the output lines up.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in matched),
        max(len(repo.pathto(path)) for path in matched),
    )
    for path in matched:
        line = fmt % (
            path,
            display(repo.pathto(path)),
            b'exact' if matcher.exact(path) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4382 4382
4383 4383
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # When divergence is involved, prefix the reason with the list of
        # divergent nodes and their phases.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
                )
                + b' '
            )
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4401 4401
4402 4402
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only the command-specific,
        # non-empty options are forwarded over the wire.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4433 4433
4434 4434
4435 4435 def _parsewirelangblocks(fh):
4436 4436 activeaction = None
4437 4437 blocklines = []
4438 4438 lastindent = 0
4439 4439
4440 4440 for line in fh:
4441 4441 line = line.rstrip()
4442 4442 if not line:
4443 4443 continue
4444 4444
4445 4445 if line.startswith(b'#'):
4446 4446 continue
4447 4447
4448 4448 if not line.startswith(b' '):
4449 4449 # New block. Flush previous one.
4450 4450 if activeaction:
4451 4451 yield activeaction, blocklines
4452 4452
4453 4453 activeaction = line
4454 4454 blocklines = []
4455 4455 lastindent = 0
4456 4456 continue
4457 4457
4458 4458 # Else we start with an indent.
4459 4459
4460 4460 if not activeaction:
4461 4461 raise error.Abort(_(b'indented line outside of block'))
4462 4462
4463 4463 indent = len(line) - len(line.lstrip())
4464 4464
4465 4465 # If this line is indented more than the last line, concatenate it.
4466 4466 if indent > lastindent and blocklines:
4467 4467 blocklines[-1] += line.lstrip()
4468 4468 else:
4469 4469 blocklines.append(line)
4470 4470 lastindent = indent
4471 4471
4472 4472 # Flush last block.
4473 4473 if activeaction:
4474 4474 yield activeaction, blocklines
4475 4475
4476 4476
4477 4477 @command(
4478 4478 b'debugwireproto',
4479 4479 [
4480 4480 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4481 4481 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4482 4482 (
4483 4483 b'',
4484 4484 b'noreadstderr',
4485 4485 False,
4486 4486 _(b'do not read from stderr of the remote'),
4487 4487 ),
4488 4488 (
4489 4489 b'',
4490 4490 b'nologhandshake',
4491 4491 False,
4492 4492 _(b'do not log I/O related to the peer handshake'),
4493 4493 ),
4494 4494 ]
4495 4495 + cmdutil.remoteopts,
4496 4496 _(b'[PATH]'),
4497 4497 optionalrepo=True,
4498 4498 )
4499 4499 def debugwireproto(ui, repo, path=None, **opts):
4500 4500 """send wire protocol commands to a server
4501 4501
4502 4502 This command can be used to issue wire protocol commands to remote
4503 4503 peers and to debug the raw data being exchanged.
4504 4504
4505 4505 ``--localssh`` will start an SSH server against the current repository
4506 4506 and connect to that. By default, the connection will perform a handshake
4507 4507 and establish an appropriate peer instance.
4508 4508
4509 4509 ``--peer`` can be used to bypass the handshake protocol and construct a
4510 4510 peer instance using the specified class type. Valid values are ``raw``,
4511 4511 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4512 4512 don't support higher-level command actions.
4513 4513
4514 4514 ``--noreadstderr`` can be used to disable automatic reading from stderr
4515 4515 of the peer (for SSH connections only). Disabling automatic reading of
4516 4516 stderr is useful for making output more deterministic.
4517 4517
4518 4518 Commands are issued via a mini language which is specified via stdin.
4519 4519 The language consists of individual actions to perform. An action is
4520 4520 defined by a block. A block is defined as a line with no leading
4521 4521 space followed by 0 or more lines with leading space. Blocks are
4522 4522 effectively a high-level command with additional metadata.
4523 4523
4524 4524 Lines beginning with ``#`` are ignored.
4525 4525
4526 4526 The following sections denote available actions.
4527 4527
4528 4528 raw
4529 4529 ---
4530 4530
4531 4531 Send raw data to the server.
4532 4532
4533 4533 The block payload contains the raw data to send as one atomic send
4534 4534 operation. The data may not actually be delivered in a single system
4535 4535 call: it depends on the abilities of the transport being used.
4536 4536
4537 4537 Each line in the block is de-indented and concatenated. Then, that
4538 4538 value is evaluated as a Python b'' literal. This allows the use of
4539 4539 backslash escaping, etc.
4540 4540
4541 4541 raw+
4542 4542 ----
4543 4543
4544 4544 Behaves like ``raw`` except flushes output afterwards.
4545 4545
4546 4546 command <X>
4547 4547 -----------
4548 4548
4549 4549 Send a request to run a named command, whose name follows the ``command``
4550 4550 string.
4551 4551
4552 4552 Arguments to the command are defined as lines in this block. The format of
4553 4553 each line is ``<key> <value>``. e.g.::
4554 4554
4555 4555 command listkeys
4556 4556 namespace bookmarks
4557 4557
4558 4558 If the value begins with ``eval:``, it will be interpreted as a Python
4559 4559 literal expression. Otherwise values are interpreted as Python b'' literals.
4560 4560 This allows sending complex types and encoding special byte sequences via
4561 4561 backslash escaping.
4562 4562
4563 4563 The following arguments have special meaning:
4564 4564
4565 4565 ``PUSHFILE``
4566 4566 When defined, the *push* mechanism of the peer will be used instead
4567 4567 of the static request-response mechanism and the content of the
4568 4568 file specified in the value of this argument will be sent as the
4569 4569 command payload.
4570 4570
4571 4571 This can be used to submit a local bundle file to the remote.
4572 4572
4573 4573 batchbegin
4574 4574 ----------
4575 4575
4576 4576 Instruct the peer to begin a batched send.
4577 4577
4578 4578 All ``command`` blocks are queued for execution until the next
4579 4579 ``batchsubmit`` block.
4580 4580
4581 4581 batchsubmit
4582 4582 -----------
4583 4583
4584 4584 Submit previously queued ``command`` blocks as a batch request.
4585 4585
4586 4586 This action MUST be paired with a ``batchbegin`` action.
4587 4587
4588 4588 httprequest <method> <path>
4589 4589 ---------------------------
4590 4590
4591 4591 (HTTP peer only)
4592 4592
4593 4593 Send an HTTP request to the peer.
4594 4594
4595 4595 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4596 4596
4597 4597 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4598 4598 headers to add to the request. e.g. ``Accept: foo``.
4599 4599
4600 4600 The following arguments are special:
4601 4601
4602 4602 ``BODYFILE``
4603 4603 The content of the file defined as the value to this argument will be
4604 4604 transferred verbatim as the HTTP request body.
4605 4605
4606 4606 ``frame <type> <flags> <payload>``
4607 4607 Send a unified protocol frame as part of the request body.
4608 4608
4609 4609 All frames will be collected and sent as the body to the HTTP
4610 4610 request.
4611 4611
4612 4612 close
4613 4613 -----
4614 4614
4615 4615 Close the connection to the server.
4616 4616
4617 4617 flush
4618 4618 -----
4619 4619
4620 4620 Flush data written to the server.
4621 4621
4622 4622 readavailable
4623 4623 -------------
4624 4624
4625 4625 Close the write end of the connection and read all available data from
4626 4626 the server.
4627 4627
4628 4628 If the connection to the server encompasses multiple pipes, we poll both
4629 4629 pipes and read available data.
4630 4630
4631 4631 readline
4632 4632 --------
4633 4633
4634 4634 Read a line of output from the server. If there are multiple output
4635 4635 pipes, reads only the main pipe.
4636 4636
4637 4637 ereadline
4638 4638 ---------
4639 4639
4640 4640 Like ``readline``, but read from the stderr pipe, if available.
4641 4641
4642 4642 read <X>
4643 4643 --------
4644 4644
4645 4645 ``read()`` N bytes from the server's main output pipe.
4646 4646
4647 4647 eread <X>
4648 4648 ---------
4649 4649
4650 4650 ``read()`` N bytes from the server's stderr pipe, if available.
4651 4651
4652 4652 Specifying Unified Frame-Based Protocol Frames
4653 4653 ----------------------------------------------
4654 4654
4655 4655 It is possible to emit a *Unified Frame-Based Protocol* by using special
4656 4656 syntax.
4657 4657
4658 4658 A frame is composed as a type, flags, and payload. These can be parsed
4659 4659 from a string of the form:
4660 4660
4661 4661 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4662 4662
4663 4663 ``request-id`` and ``stream-id`` are integers defining the request and
4664 4664 stream identifiers.
4665 4665
4666 4666 ``type`` can be an integer value for the frame type or the string name
4667 4667 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4668 4668 ``command-name``.
4669 4669
4670 4670 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4671 4671 components. Each component (and there can be just one) can be an integer
4672 4672 or a flag name for stream flags or frame flags, respectively. Values are
4673 4673 resolved to integers and then bitwise OR'd together.
4674 4674
4675 4675 ``payload`` represents the raw frame payload. If it begins with
4676 4676 ``cbor:``, the following string is evaluated as Python code and the
4677 4677 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4678 4678 as a Python byte string literal.
4679 4679 """
4680 4680 opts = pycompat.byteskwargs(opts)
4681 4681
4682 4682 if opts[b'localssh'] and not repo:
4683 4683 raise error.Abort(_(b'--localssh requires a repository'))
4684 4684
4685 4685 if opts[b'peer'] and opts[b'peer'] not in (
4686 4686 b'raw',
4687 4687 b'ssh1',
4688 4688 ):
4689 4689 raise error.Abort(
4690 4690 _(b'invalid value for --peer'),
4691 4691 hint=_(b'valid values are "raw" and "ssh1"'),
4692 4692 )
4693 4693
4694 4694 if path and opts[b'localssh']:
4695 4695 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4696 4696
4697 4697 if ui.interactive():
4698 4698 ui.write(_(b'(waiting for commands on stdin)\n'))
4699 4699
4700 4700 blocks = list(_parsewirelangblocks(ui.fin))
4701 4701
4702 4702 proc = None
4703 4703 stdin = None
4704 4704 stdout = None
4705 4705 stderr = None
4706 4706 opener = None
4707 4707
4708 4708 if opts[b'localssh']:
4709 4709 # We start the SSH server in its own process so there is process
4710 4710 # separation. This prevents a whole class of potential bugs around
4711 4711 # shared state from interfering with server operation.
4712 4712 args = procutil.hgcmd() + [
4713 4713 b'-R',
4714 4714 repo.root,
4715 4715 b'debugserve',
4716 4716 b'--sshstdio',
4717 4717 ]
4718 4718 proc = subprocess.Popen(
4719 4719 pycompat.rapply(procutil.tonativestr, args),
4720 4720 stdin=subprocess.PIPE,
4721 4721 stdout=subprocess.PIPE,
4722 4722 stderr=subprocess.PIPE,
4723 4723 bufsize=0,
4724 4724 )
4725 4725
4726 4726 stdin = proc.stdin
4727 4727 stdout = proc.stdout
4728 4728 stderr = proc.stderr
4729 4729
4730 4730 # We turn the pipes into observers so we can log I/O.
4731 4731 if ui.verbose or opts[b'peer'] == b'raw':
4732 4732 stdin = util.makeloggingfileobject(
4733 4733 ui, proc.stdin, b'i', logdata=True
4734 4734 )
4735 4735 stdout = util.makeloggingfileobject(
4736 4736 ui, proc.stdout, b'o', logdata=True
4737 4737 )
4738 4738 stderr = util.makeloggingfileobject(
4739 4739 ui, proc.stderr, b'e', logdata=True
4740 4740 )
4741 4741
4742 4742 # --localssh also implies the peer connection settings.
4743 4743
4744 4744 url = b'ssh://localserver'
4745 4745 autoreadstderr = not opts[b'noreadstderr']
4746 4746
4747 4747 if opts[b'peer'] == b'ssh1':
4748 4748 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4749 4749 peer = sshpeer.sshv1peer(
4750 4750 ui,
4751 4751 url,
4752 4752 proc,
4753 4753 stdin,
4754 4754 stdout,
4755 4755 stderr,
4756 4756 None,
4757 4757 autoreadstderr=autoreadstderr,
4758 4758 )
4759 4759 elif opts[b'peer'] == b'raw':
4760 4760 ui.write(_(b'using raw connection to peer\n'))
4761 4761 peer = None
4762 4762 else:
4763 4763 ui.write(_(b'creating ssh peer from handshake results\n'))
4764 4764 peer = sshpeer.makepeer(
4765 4765 ui,
4766 4766 url,
4767 4767 proc,
4768 4768 stdin,
4769 4769 stdout,
4770 4770 stderr,
4771 4771 autoreadstderr=autoreadstderr,
4772 4772 )
4773 4773
4774 4774 elif path:
4775 4775 # We bypass hg.peer() so we can proxy the sockets.
4776 4776 # TODO consider not doing this because we skip
4777 4777 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4778 4778 u = urlutil.url(path)
4779 4779 if u.scheme != b'http':
4780 4780 raise error.Abort(_(b'only http:// paths are currently supported'))
4781 4781
4782 4782 url, authinfo = u.authinfo()
4783 4783 openerargs = {
4784 4784 'useragent': b'Mercurial debugwireproto',
4785 4785 }
4786 4786
4787 4787 # Turn pipes/sockets into observers so we can log I/O.
4788 4788 if ui.verbose:
4789 4789 openerargs.update(
4790 4790 {
4791 4791 'loggingfh': ui,
4792 4792 'loggingname': b's',
4793 4793 'loggingopts': {
4794 4794 'logdata': True,
4795 4795 'logdataapis': False,
4796 4796 },
4797 4797 }
4798 4798 )
4799 4799
4800 4800 if ui.debugflag:
4801 4801 openerargs['loggingopts']['logdataapis'] = True
4802 4802
4803 4803 # Don't send default headers when in raw mode. This allows us to
4804 4804 # bypass most of the behavior of our URL handling code so we can
4805 4805 # have near complete control over what's sent on the wire.
4806 4806 if opts[b'peer'] == b'raw':
4807 4807 openerargs['sendaccept'] = False
4808 4808
4809 4809 opener = urlmod.opener(ui, authinfo, **openerargs)
4810 4810
4811 4811 if opts[b'peer'] == b'raw':
4812 4812 ui.write(_(b'using raw connection to peer\n'))
4813 4813 peer = None
4814 4814 elif opts[b'peer']:
4815 4815 raise error.Abort(
4816 4816 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4817 4817 )
4818 4818 else:
4819 4819 peer = httppeer.makepeer(ui, path, opener=opener)
4820 4820
4821 4821 # We /could/ populate stdin/stdout with sock.makefile()...
4822 4822 else:
4823 4823 raise error.Abort(_(b'unsupported connection configuration'))
4824 4824
4825 4825 batchedcommands = None
4826 4826
4827 4827 # Now perform actions based on the parsed wire language instructions.
4828 4828 for action, lines in blocks:
4829 4829 if action in (b'raw', b'raw+'):
4830 4830 if not stdin:
4831 4831 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4832 4832
4833 4833 # Concatenate the data together.
4834 4834 data = b''.join(l.lstrip() for l in lines)
4835 4835 data = stringutil.unescapestr(data)
4836 4836 stdin.write(data)
4837 4837
4838 4838 if action == b'raw+':
4839 4839 stdin.flush()
4840 4840 elif action == b'flush':
4841 4841 if not stdin:
4842 4842 raise error.Abort(_(b'cannot call flush on this peer'))
4843 4843 stdin.flush()
4844 4844 elif action.startswith(b'command'):
4845 4845 if not peer:
4846 4846 raise error.Abort(
4847 4847 _(
4848 4848 b'cannot send commands unless peer instance '
4849 4849 b'is available'
4850 4850 )
4851 4851 )
4852 4852
4853 4853 command = action.split(b' ', 1)[1]
4854 4854
4855 4855 args = {}
4856 4856 for line in lines:
4857 4857 # We need to allow empty values.
4858 4858 fields = line.lstrip().split(b' ', 1)
4859 4859 if len(fields) == 1:
4860 4860 key = fields[0]
4861 4861 value = b''
4862 4862 else:
4863 4863 key, value = fields
4864 4864
4865 4865 if value.startswith(b'eval:'):
4866 4866 value = stringutil.evalpythonliteral(value[5:])
4867 4867 else:
4868 4868 value = stringutil.unescapestr(value)
4869 4869
4870 4870 args[key] = value
4871 4871
4872 4872 if batchedcommands is not None:
4873 4873 batchedcommands.append((command, args))
4874 4874 continue
4875 4875
4876 4876 ui.status(_(b'sending %s command\n') % command)
4877 4877
4878 4878 if b'PUSHFILE' in args:
4879 4879 with open(args[b'PUSHFILE'], 'rb') as fh:
4880 4880 del args[b'PUSHFILE']
4881 4881 res, output = peer._callpush(
4882 4882 command, fh, **pycompat.strkwargs(args)
4883 4883 )
4884 4884 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4885 4885 ui.status(
4886 4886 _(b'remote output: %s\n') % stringutil.escapestr(output)
4887 4887 )
4888 4888 else:
4889 4889 with peer.commandexecutor() as e:
4890 4890 res = e.callcommand(command, args).result()
4891 4891
4892 4892 ui.status(
4893 4893 _(b'response: %s\n')
4894 4894 % stringutil.pprint(res, bprefix=True, indent=2)
4895 4895 )
4896 4896
4897 4897 elif action == b'batchbegin':
4898 4898 if batchedcommands is not None:
4899 4899 raise error.Abort(_(b'nested batchbegin not allowed'))
4900 4900
4901 4901 batchedcommands = []
4902 4902 elif action == b'batchsubmit':
4903 4903 # There is a batching API we could go through. But it would be
4904 4904 # difficult to normalize requests into function calls. It is easier
4905 4905 # to bypass this layer and normalize to commands + args.
4906 4906 ui.status(
4907 4907 _(b'sending batch with %d sub-commands\n')
4908 4908 % len(batchedcommands)
4909 4909 )
4910 4910 assert peer is not None
4911 4911 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4912 4912 ui.status(
4913 4913 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4914 4914 )
4915 4915
4916 4916 batchedcommands = None
4917 4917
4918 4918 elif action.startswith(b'httprequest '):
4919 4919 if not opener:
4920 4920 raise error.Abort(
4921 4921 _(b'cannot use httprequest without an HTTP peer')
4922 4922 )
4923 4923
4924 4924 request = action.split(b' ', 2)
4925 4925 if len(request) != 3:
4926 4926 raise error.Abort(
4927 4927 _(
4928 4928 b'invalid httprequest: expected format is '
4929 4929 b'"httprequest <method> <path>'
4930 4930 )
4931 4931 )
4932 4932
4933 4933 method, httppath = request[1:]
4934 4934 headers = {}
4935 4935 body = None
4936 4936 frames = []
4937 4937 for line in lines:
4938 4938 line = line.lstrip()
4939 4939 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4940 4940 if m:
4941 4941 # Headers need to use native strings.
4942 4942 key = pycompat.strurl(m.group(1))
4943 4943 value = pycompat.strurl(m.group(2))
4944 4944 headers[key] = value
4945 4945 continue
4946 4946
4947 4947 if line.startswith(b'BODYFILE '):
4948 4948 with open(line.split(b' ', 1), b'rb') as fh:
4949 4949 body = fh.read()
4950 4950 elif line.startswith(b'frame '):
4951 4951 frame = wireprotoframing.makeframefromhumanstring(
4952 4952 line[len(b'frame ') :]
4953 4953 )
4954 4954
4955 4955 frames.append(frame)
4956 4956 else:
4957 4957 raise error.Abort(
4958 4958 _(b'unknown argument to httprequest: %s') % line
4959 4959 )
4960 4960
4961 4961 url = path + httppath
4962 4962
4963 4963 if frames:
4964 4964 body = b''.join(bytes(f) for f in frames)
4965 4965
4966 4966 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4967 4967
4968 4968 # urllib.Request insists on using has_data() as a proxy for
4969 4969 # determining the request method. Override that to use our
4970 4970 # explicitly requested method.
4971 4971 req.get_method = lambda: pycompat.sysstr(method)
4972 4972
4973 4973 try:
4974 4974 res = opener.open(req)
4975 4975 body = res.read()
4976 4976 except util.urlerr.urlerror as e:
4977 4977 # read() method must be called, but only exists in Python 2
4978 4978 getattr(e, 'read', lambda: None)()
4979 4979 continue
4980 4980
4981 4981 ct = res.headers.get('Content-Type')
4982 4982 if ct == 'application/mercurial-cbor':
4983 4983 ui.write(
4984 4984 _(b'cbor> %s\n')
4985 4985 % stringutil.pprint(
4986 4986 cborutil.decodeall(body), bprefix=True, indent=2
4987 4987 )
4988 4988 )
4989 4989
4990 4990 elif action == b'close':
4991 4991 assert peer is not None
4992 4992 peer.close()
4993 4993 elif action == b'readavailable':
4994 4994 if not stdout or not stderr:
4995 4995 raise error.Abort(
4996 4996 _(b'readavailable not available on this peer')
4997 4997 )
4998 4998
4999 4999 stdin.close()
5000 5000 stdout.read()
5001 5001 stderr.read()
5002 5002
5003 5003 elif action == b'readline':
5004 5004 if not stdout:
5005 5005 raise error.Abort(_(b'readline not available on this peer'))
5006 5006 stdout.readline()
5007 5007 elif action == b'ereadline':
5008 5008 if not stderr:
5009 5009 raise error.Abort(_(b'ereadline not available on this peer'))
5010 5010 stderr.readline()
5011 5011 elif action.startswith(b'read '):
5012 5012 count = int(action.split(b' ', 1)[1])
5013 5013 if not stdout:
5014 5014 raise error.Abort(_(b'read not available on this peer'))
5015 5015 stdout.read(count)
5016 5016 elif action.startswith(b'eread '):
5017 5017 count = int(action.split(b' ', 1)[1])
5018 5018 if not stderr:
5019 5019 raise error.Abort(_(b'eread not available on this peer'))
5020 5020 stderr.read(count)
5021 5021 else:
5022 5022 raise error.Abort(_(b'unknown action: %s') % action)
5023 5023
5024 5024 if batchedcommands is not None:
5025 5025 raise error.Abort(_(b'unclosed "batchbegin" request'))
5026 5026
5027 5027 if peer:
5028 5028 peer.close()
5029 5029
5030 5030 if proc:
5031 5031 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now