##// END OF EJS Templates
debugbuilddag: add a flag to allow running it from a non-empty repository...
marmoute -
r49540:b4bc9c4f default
parent child Browse files
Show More
@@ -1,4877 +1,4884 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import binascii
11 11 import codecs
12 12 import collections
13 13 import contextlib
14 14 import difflib
15 15 import errno
16 16 import glob
17 17 import operator
18 18 import os
19 19 import platform
20 20 import random
21 21 import re
22 22 import socket
23 23 import ssl
24 24 import stat
25 25 import string
26 26 import subprocess
27 27 import sys
28 28 import time
29 29
30 30 from .i18n import _
31 31 from .node import (
32 32 bin,
33 33 hex,
34 34 nullrev,
35 35 short,
36 36 )
37 37 from .pycompat import (
38 38 getattr,
39 39 open,
40 40 )
41 41 from . import (
42 42 bundle2,
43 43 bundlerepo,
44 44 changegroup,
45 45 cmdutil,
46 46 color,
47 47 context,
48 48 copies,
49 49 dagparser,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 vfs as vfsmod,
92 92 wireprotoframing,
93 93 wireprotoserver,
94 94 )
95 95 from .interfaces import repository
96 96 from .utils import (
97 97 cborutil,
98 98 compression,
99 99 dateutil,
100 100 procutil,
101 101 stringutil,
102 102 urlutil,
103 103 )
104 104
105 105 from .revlogutils import (
106 106 deltas as deltautil,
107 107 nodemap,
108 108 rewrite,
109 109 sidedata,
110 110 )
111 111
112 112 release = lockmod.release
113 113
114 114 table = {}
115 115 table.update(strip.command._table)
116 116 command = registrar.command(table)
117 117
118 118
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two calling conventions:
    #   INDEX REV1 REV2 -- open the given revlog index file directly
    #                      (works outside a repository, hence optionalrepo)
    #   REV1 REV2       -- use the changelog of the current repository
    if len(args) == 3:
        index, rev1, rev2 = args
        # audit=False: the index path is user-supplied and may point anywhere
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    # ancestor() works on nodes, the output line shows "rev:hexnode".
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
138 138
139 139
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Drop the standard EICAR test file into the cache area; a resident AV
    # scanner is expected to react to (quarantine/delete) it.
    # NOTE(review): the path is a str literal while most vfs calls in this
    # file use bytes literals -- confirm cachevfs accepts str paths here.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    # Clean up the test file (assuming a scanner has not already removed it).
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
155 155
156 156
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle, let the exchange layer sniff its format, then
    # replay it straight into the local repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
163 163
164 164
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo
    (or, with --from-existing, appends to a non-empty one)

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # Refuse to clobber existing history unless --from-existing was given.
    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, for the progress bar)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # All commits happen inside a single transaction holding both locks.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the last node committed (-1: none yet)
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # maps DAG id -> committed node, for backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n': create one changeset with parents 'ps'
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the shared file so the
                        # result stays automatically mergeable.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this rev's dedicated line so every rev touches a
                    # distinct line of the shared file.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # Single file fully rewritten by every rev.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # One brand-new file per rev; merges also carry over the
                    # "nf*" files from the second parent.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve content for the files above.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l': attach a local tag to an already-created node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a': switch the named branch for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # Local tags are not versioned; write them after the transaction closes.
    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
343 350
344 351
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """print the contents of changegroup 'gen'

    With 'all', every delta field of every chunk is shown; otherwise only
    the changelog node hashes are listed.  'indent' prefixes each output
    line (used when nested inside bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print the header 'named' and then one line per delta in the
            # current section of the changegroup stream.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # Changegroup sections come in fixed order: changelog, manifests,
        # then one filelog section per file until an empty header.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
384 391
385 392
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and obsolescence markers contained in bundle2 'part'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown on-disk marker format: report it instead of crashing.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # Render markers through the same formatter 'debugobsolete' uses.
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
408 415
409 416
def _debugphaseheads(ui, data, indent=0):
    """decode a binary phase-heads blob and print one '<node> <phase>' line
    per head, indented by 'indent' spaces"""
    pad = b' ' * indent
    heads_by_phase = phases.binarydecode(data)
    for phase in phases.allphases:
        phase_name = phases.phasenames[phase]
        for head in heads_by_phase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phase_name))
418 425
419 426
def _quasirepr(thing):
    """Return a byte-string repr of 'thing'; mappings are rendered with
    their keys sorted so the output is deterministic."""
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    # repr() of a dict follows insertion order; sort keys for stable output.
    pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(pairs)
426 433
427 434
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # --part-type acts as a whitelist when given; empty list means "all".
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # Known part payloads get a detailed, indented dump unless --quiet.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
450 457
451 458
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundlespec string, no content listing.
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        # Dispatch on the detected bundle format (bundle2 vs changegroup).
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
474 481
475 482
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        # bundle2 capabilities are embedded in the main capability blob;
        # decode and list them separately, one indented value per line.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # Always release the peer connection, even on error.
        peer.close()
495 502
496 503
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # --compute: rebuild the information from the changeset contents.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Default: read the pre-computed record from changelog sidedata,
        # if this repo stores one for the revision.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classify each touched file into exactly one action bucket.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # Record copy tracing info (which parent the copy came from).
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
546 553
547 554
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # Compare the dirstate against the manifests of both working-copy
    # parents and report every inconsistency found.
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    problem_count = 0
    for problem in repo.dirstate.verify(manifest1, manifest2):
        problem_count += 1
        ui.warn(problem[0] % problem[1:])
    if problem_count:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
561 568
562 569
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    # Always report the active color mode, then dispatch on --style.
    mode = stringutil.pprint(ui._colormode)
    ui.writenoi18n(b'color mode: %s\n' % mode)
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
575 582
576 583
def _debugdisplaycolor(ui):
    """print every available color name, each rendered in its own color"""
    # Work on a copy so the caller's ui style table is left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also include colors/effects declared in the
        # [color] configuration section.
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
593 600
594 601
def _debugdisplaystyle(ui):
    """list every configured style label along with its rendered effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Align the effect columns on the longest label.
    column_width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, column_width - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
608 615
609 616
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # Generate the v1 stream bundle and write its chunks to 'fname'.
    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    # A consumer must support all of these repo requirements to apply it.
    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
631 638
632 639
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Stand-alone revlog index: dump its DAG, labeling requested revs.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) for each node and ('l', ...) for
            # each requested rev label, in revision order.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Invert the tag table: rev -> list of tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # Same event stream as above, plus ('a', branch) annotations
            # emitted whenever the named branch changes.
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
702 709
703 710
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision,
    # not a file path: shuffle the arguments accordingly.
    if any(opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
719 726
720 727
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # Parse with the extended format table when -e/--extended is given.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # Optionally test the parsed date against a date range expression.
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
739 746
740 747
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Summarize one revision: compressed/uncompressed sizes, how its
        # delta base was chosen, and the full delta chain with total size.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; e[5]/e[6] are the parent revisions.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta a delta is always against the previous
            # revision (or the rev is a full-text base).
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chains sharing a base revision share a chain id.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # Guard the ratios against division by zero (empty revisions).
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain to measure how much disk
            # data would actually be touched and in how many hunks.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
921 928
922 929
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --dates=no; honor it if set.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # Sort by (mtime, filename) when --datesort is requested.
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # -1 marks an unset mtime; pad to the strftime column width.
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set: display 'lnk' instead of octal permissions
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
977 984
978 985
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 dockets record the ignore-pattern hash; under
    # dirstate-v1 this command deliberately prints nothing.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The hash is stored as the trailing bytes of the tree metadata.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
993 1000
994 1001
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented configs are relevant for people playing
    with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)))

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual peer (possibly a local path)

        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: use the local repository itself as the remote,
        # hiding every revision that is not an ancestor of the given set via
        # a dynamically registered repoview filter.
        # NOTE(review): the filter function stays registered in the global
        # filtertable for the rest of the process — acceptable for a one-shot
        # debug command.
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same trick, applied to the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # audit dictionary, filled in by the discovery implementations (and by the
    # optional output capture below)
    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # NOTE(review): `any` shadows the builtin here; harmless but worth
            # renaming if this is ever touched in a code change.
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # structured output (e.g. JSON): capture anything written to the ui
        # during discovery and expose it as the "output" field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # every revision is either common or missing, never both
    assert len(common) + len(missing) == len(all)

    # the set discovery starts from the exchanged heads; everything neither
    # below a common-remote head nor above a common-local head was initially
    # undecided
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1248 1255
1249 1256
1250 1257 _chunksize = 4 << 10
1251 1258
1252 1259
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource at ``url`` is fetched through ``urlmod.open`` (so proxies,
    authentication and other Mercurial networking configuration apply) and is
    streamed in ``_chunksize`` pieces either to the ui (the default) or to the
    file named by ``--output``.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    try:
        # open the destination inside the try block so that a failure to
        # create the output file still closes the response handle below
        if output:
            dest = open(output, b"wb", _chunksize)
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # always release the network/response handle; the original code
        # closed only the output file, leaking fh on every call
        fh.close()
        if dest is not ui:
            dest.close()
1275 1282
1276 1283
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    loaded = extensions.extensions(ui)
    mercurialversion = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for name, module in sorted(loaded, key=operator.itemgetter(0)):
        internal = extensions.ismoduleinternal(module)
        location = None

        if util.safehasattr(module, '__file__'):
            location = pycompat.fsencode(module.__file__)
        elif getattr(sys, 'oxidized', False):
            location = pycompat.sysexecutable
        if internal:
            testedwith = []  # never expose magic string to users
        else:
            testedwith = getattr(module, 'testedwith', b'').split()
        buglink = getattr(module, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', name)
        else:
            fm.write(b'name', b'%s', name)
            if internal or mercurialversion in testedwith:
                fm.plain(b'\n')
            elif not testedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                fm.plain(b' (%s!)\n' % testedwith[-1])

        fm.condwrite(
            ui.verbose and location,
            b'source',
            _(b'  location: %s\n'),
            location or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][internal])
            fm.data(bundled=internal)

        fm.condwrite(
            ui.verbose and testedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(testedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and buglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            buglink or b"",
        )

    fm.end()
1338 1345
1339 1346
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset compilation pipeline, in order; each stage transforms the
    # tree produced by the previous one
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, dumping each requested intermediate tree
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    # -s/--show-matcher is tri-state: None means "only with --verbose"
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1435 1442
1436 1443
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse
    the computation of affected revisions for a given repository across
    clones. The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    A single filelog may have several broken revisions; they appear on one
    line, comma-separated with no spaces, followed by a single space and the
    filename.

    Note that this does *not* mean that this repairs future affected
    revisions, that needs a separate fix at the exchange level that was
    introduced in Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is
    correct by checking it against the slow implementation. Since this matter
    is quite urgent and testing every edge-case is probably quite costly, we
    use this method to test on large repositories as a fuzzing method of
    sorts.
    """
    # --to-report is exclusive with both --from-report and --dry-run
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )

    # the corruption only exists in revlogv1 stores; refuse to touch others
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1509 1516
1510 1517
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: the longest variant name, never narrower than the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' followed by padding so every value column lines up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes-like values print as-is; everything else as yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # highlight mismatches between repo state, config and default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1581 1588
1582 1589
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""
    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(
        b'exec: %s\n' % (b'yes' if util.checkexec(path) else b'no')
    )
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(
        b'symlink: %s\n' % (b'yes' if util.checklink(path) else b'no')
    )
    ui.writenoi18n(
        b'hardlink: %s\n' % (b'yes' if util.checknlink(path) else b'no')
    )
    # probe case sensitivity with a throw-away temp file; if we cannot even
    # create one (e.g. read-only location) the answer stays unknown
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = b'yes' if util.fscasesensitive(f.name) else b'no'
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1605 1612
1606 1613
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # build the getbundle() keyword arguments from the command line
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # map the user-facing compression name to the on-disk bundle header
    requested = opts.get(b'type', b'bzip2').lower()
    knowntypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = knowntypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1653 1660
1654 1661
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no file arguments: just dump the combined matcher
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            if ignore(nf):
                # the file itself matches an ignore rule
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # otherwise check whether any parent directory is ignored
                for p in pathutil.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1703 1710
1704 1711
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in debug mode, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # measure the node-id width from the first revision (falls back to 12
    # for an empty store)
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1744 1751
1745 1752
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # one edge per parent; the null second parent is omitted
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1764 1771
1765 1772
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # NOTE(review): this shortest() call presumably forces the index to be
    # fully loaded before we inspect it — confirm against revlog internals
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1775 1782
1776 1783
1777 1784 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1778 1785 def debuginstall(ui, **opts):
1779 1786 """test Mercurial installation
1780 1787
1781 1788 Returns 0 on success.
1782 1789 """
1783 1790 opts = pycompat.byteskwargs(opts)
1784 1791
1785 1792 problems = 0
1786 1793
1787 1794 fm = ui.formatter(b'debuginstall', opts)
1788 1795 fm.startitem()
1789 1796
1790 1797 # encoding might be unknown or wrong. don't translate these messages.
1791 1798 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1792 1799 err = None
1793 1800 try:
1794 1801 codecs.lookup(pycompat.sysstr(encoding.encoding))
1795 1802 except LookupError as inst:
1796 1803 err = stringutil.forcebytestr(inst)
1797 1804 problems += 1
1798 1805 fm.condwrite(
1799 1806 err,
1800 1807 b'encodingerror',
1801 1808 b" %s\n (check that your locale is properly set)\n",
1802 1809 err,
1803 1810 )
1804 1811
1805 1812 # Python
1806 1813 pythonlib = None
1807 1814 if util.safehasattr(os, '__file__'):
1808 1815 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1809 1816 elif getattr(sys, 'oxidized', False):
1810 1817 pythonlib = pycompat.sysexecutable
1811 1818
1812 1819 fm.write(
1813 1820 b'pythonexe',
1814 1821 _(b"checking Python executable (%s)\n"),
1815 1822 pycompat.sysexecutable or _(b"unknown"),
1816 1823 )
1817 1824 fm.write(
1818 1825 b'pythonimplementation',
1819 1826 _(b"checking Python implementation (%s)\n"),
1820 1827 pycompat.sysbytes(platform.python_implementation()),
1821 1828 )
1822 1829 fm.write(
1823 1830 b'pythonver',
1824 1831 _(b"checking Python version (%s)\n"),
1825 1832 (b"%d.%d.%d" % sys.version_info[:3]),
1826 1833 )
1827 1834 fm.write(
1828 1835 b'pythonlib',
1829 1836 _(b"checking Python lib (%s)...\n"),
1830 1837 pythonlib or _(b"unknown"),
1831 1838 )
1832 1839
1833 1840 try:
1834 1841 from . import rustext # pytype: disable=import-error
1835 1842
1836 1843 rustext.__doc__ # trigger lazy import
1837 1844 except ImportError:
1838 1845 rustext = None
1839 1846
1840 1847 security = set(sslutil.supportedprotocols)
1841 1848 if sslutil.hassni:
1842 1849 security.add(b'sni')
1843 1850
1844 1851 fm.write(
1845 1852 b'pythonsecurity',
1846 1853 _(b"checking Python security support (%s)\n"),
1847 1854 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1848 1855 )
1849 1856
1850 1857 # These are warnings, not errors. So don't increment problem count. This
1851 1858 # may change in the future.
1852 1859 if b'tls1.2' not in security:
1853 1860 fm.plain(
1854 1861 _(
1855 1862 b' TLS 1.2 not supported by Python install; '
1856 1863 b'network connections lack modern security\n'
1857 1864 )
1858 1865 )
1859 1866 if b'sni' not in security:
1860 1867 fm.plain(
1861 1868 _(
1862 1869 b' SNI not supported by Python install; may have '
1863 1870 b'connectivity issues with some servers\n'
1864 1871 )
1865 1872 )
1866 1873
1867 1874 fm.plain(
1868 1875 _(
1869 1876 b"checking Rust extensions (%s)\n"
1870 1877 % (b'missing' if rustext is None else b'installed')
1871 1878 ),
1872 1879 )
1873 1880
1874 1881 # TODO print CA cert info
1875 1882
1876 1883 # hg version
1877 1884 hgver = util.version()
1878 1885 fm.write(
1879 1886 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1880 1887 )
1881 1888 fm.write(
1882 1889 b'hgverextra',
1883 1890 _(b"checking Mercurial custom build (%s)\n"),
1884 1891 b'+'.join(hgver.split(b'+')[1:]),
1885 1892 )
1886 1893
1887 1894 # compiled modules
1888 1895 hgmodules = None
1889 1896 if util.safehasattr(sys.modules[__name__], '__file__'):
1890 1897 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1891 1898 elif getattr(sys, 'oxidized', False):
1892 1899 hgmodules = pycompat.sysexecutable
1893 1900
1894 1901 fm.write(
1895 1902 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1896 1903 )
1897 1904 fm.write(
1898 1905 b'hgmodules',
1899 1906 _(b"checking installed modules (%s)...\n"),
1900 1907 hgmodules or _(b"unknown"),
1901 1908 )
1902 1909
1903 1910 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1904 1911 rustext = rustandc # for now, that's the only case
1905 1912 cext = policy.policy in (b'c', b'allow') or rustandc
1906 1913 nopure = cext or rustext
1907 1914 if nopure:
1908 1915 err = None
1909 1916 try:
1910 1917 if cext:
1911 1918 from .cext import ( # pytype: disable=import-error
1912 1919 base85,
1913 1920 bdiff,
1914 1921 mpatch,
1915 1922 osutil,
1916 1923 )
1917 1924
1918 1925 # quiet pyflakes
1919 1926 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1920 1927 if rustext:
1921 1928 from .rustext import ( # pytype: disable=import-error
1922 1929 ancestor,
1923 1930 dirstate,
1924 1931 )
1925 1932
1926 1933 dir(ancestor), dir(dirstate) # quiet pyflakes
1927 1934 except Exception as inst:
1928 1935 err = stringutil.forcebytestr(inst)
1929 1936 problems += 1
1930 1937 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1931 1938
1932 1939 compengines = util.compengines._engines.values()
1933 1940 fm.write(
1934 1941 b'compengines',
1935 1942 _(b'checking registered compression engines (%s)\n'),
1936 1943 fm.formatlist(
1937 1944 sorted(e.name() for e in compengines),
1938 1945 name=b'compengine',
1939 1946 fmt=b'%s',
1940 1947 sep=b', ',
1941 1948 ),
1942 1949 )
1943 1950 fm.write(
1944 1951 b'compenginesavail',
1945 1952 _(b'checking available compression engines (%s)\n'),
1946 1953 fm.formatlist(
1947 1954 sorted(e.name() for e in compengines if e.available()),
1948 1955 name=b'compengine',
1949 1956 fmt=b'%s',
1950 1957 sep=b', ',
1951 1958 ),
1952 1959 )
1953 1960 wirecompengines = compression.compengines.supportedwireengines(
1954 1961 compression.SERVERROLE
1955 1962 )
1956 1963 fm.write(
1957 1964 b'compenginesserver',
1958 1965 _(
1959 1966 b'checking available compression engines '
1960 1967 b'for wire protocol (%s)\n'
1961 1968 ),
1962 1969 fm.formatlist(
1963 1970 [e.name() for e in wirecompengines if e.wireprotosupport()],
1964 1971 name=b'compengine',
1965 1972 fmt=b'%s',
1966 1973 sep=b', ',
1967 1974 ),
1968 1975 )
1969 1976 re2 = b'missing'
1970 1977 if util._re2:
1971 1978 re2 = b'available'
1972 1979 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1973 1980 fm.data(re2=bool(util._re2))
1974 1981
1975 1982 # templates
1976 1983 p = templater.templatedir()
1977 1984 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1978 1985 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1979 1986 if p:
1980 1987 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1981 1988 if m:
1982 1989 # template found, check if it is working
1983 1990 err = None
1984 1991 try:
1985 1992 templater.templater.frommapfile(m)
1986 1993 except Exception as inst:
1987 1994 err = stringutil.forcebytestr(inst)
1988 1995 p = None
1989 1996 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1990 1997 else:
1991 1998 p = None
1992 1999 fm.condwrite(
1993 2000 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1994 2001 )
1995 2002 fm.condwrite(
1996 2003 not m,
1997 2004 b'defaulttemplatenotfound',
1998 2005 _(b" template '%s' not found\n"),
1999 2006 b"default",
2000 2007 )
2001 2008 if not p:
2002 2009 problems += 1
2003 2010 fm.condwrite(
2004 2011 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2005 2012 )
2006 2013
2007 2014 # editor
2008 2015 editor = ui.geteditor()
2009 2016 editor = util.expandpath(editor)
2010 2017 editorbin = procutil.shellsplit(editor)[0]
2011 2018 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2012 2019 cmdpath = procutil.findexe(editorbin)
2013 2020 fm.condwrite(
2014 2021 not cmdpath and editor == b'vi',
2015 2022 b'vinotfound',
2016 2023 _(
2017 2024 b" No commit editor set and can't find %s in PATH\n"
2018 2025 b" (specify a commit editor in your configuration"
2019 2026 b" file)\n"
2020 2027 ),
2021 2028 not cmdpath and editor == b'vi' and editorbin,
2022 2029 )
2023 2030 fm.condwrite(
2024 2031 not cmdpath and editor != b'vi',
2025 2032 b'editornotfound',
2026 2033 _(
2027 2034 b" Can't find editor '%s' in PATH\n"
2028 2035 b" (specify a commit editor in your configuration"
2029 2036 b" file)\n"
2030 2037 ),
2031 2038 not cmdpath and editorbin,
2032 2039 )
2033 2040 if not cmdpath and editor != b'vi':
2034 2041 problems += 1
2035 2042
2036 2043 # check username
2037 2044 username = None
2038 2045 err = None
2039 2046 try:
2040 2047 username = ui.username()
2041 2048 except error.Abort as e:
2042 2049 err = e.message
2043 2050 problems += 1
2044 2051
2045 2052 fm.condwrite(
2046 2053 username, b'username', _(b"checking username (%s)\n"), username
2047 2054 )
2048 2055 fm.condwrite(
2049 2056 err,
2050 2057 b'usernameerror',
2051 2058 _(
2052 2059 b"checking username...\n %s\n"
2053 2060 b" (specify a username in your configuration file)\n"
2054 2061 ),
2055 2062 err,
2056 2063 )
2057 2064
2058 2065 for name, mod in extensions.extensions():
2059 2066 handler = getattr(mod, 'debuginstall', None)
2060 2067 if handler is not None:
2061 2068 problems += handler(ui, fm)
2062 2069
2063 2070 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2064 2071 if not problems:
2065 2072 fm.data(problems=problems)
2066 2073 fm.condwrite(
2067 2074 problems,
2068 2075 b'problems',
2069 2076 _(b"%d problems detected, please check your install!\n"),
2070 2077 problems,
2071 2078 )
2072 2079 fm.end()
2073 2080
2074 2081 return problems
2075 2082
2076 2083
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        if not repo.capable(b'known'):
            raise error.Abort(b"known() not supported by target repository")
        flags = repo.known([bin(s) for s in ids])
        # one digit per queried node, in argument order
        ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
    finally:
        # release the peer's connection/resources, as debugpeer and
        # debugpushkey do for their peers
        repo.close()
2090 2097
2091 2098
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias kept only so old shell-completion scripts keep working;
    # the real implementation is debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2096 2103
2097 2104
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # force-free modes: remove the lock files outright, with no ownership
    # check -- hence the DANGEROUS tag in the option help
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        # set modes: acquire the requested lock(s) non-blockingly and hold
        # them until the user answers the prompt (or the process is killed)
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # default mode: report the current state of both locks
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Report one lock; returns 1 if it is held, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we managed to take it, so nobody was actually holding it
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    # lock content is "host:pid"; show a friendlier form
                    # when it was taken on this very machine
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT simply means the lock file vanished: it is free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2209 2216
2210 2217
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # the fulltext cache hangs off the root manifest storage; not
        # every storage implementation provides one
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # mutation requires the wlock; wipe in-memory and persisted data
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # no action requested: display the current cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2284 2291
2285 2292
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # report which on-disk record format (v1 vs v2) will be used
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default template: commits, then per-file records (regular merges
        # and path conflicts), then any leftover extras
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged (local/other), with optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records; the fields exposed depend on the record type
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                # regular (un)resolved merge: full three-way information
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                # path conflict record: only the rename information
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2393 2400
2394 2401
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # every namespace except branches contributes all of its names; the
    # branches namespace is handled separately below so that closed
    # branches can be excluded (we historically listed only open branches)
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # with no arguments, complete against the empty prefix (i.e. everything)
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2417 2424
2418 2425
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # serialize a fresh nodemap from the current changelog index
        index = repo.unfiltered().changelog.index
        if util.safehasattr(index, "nodemap_data_all"):
            payload = index.nodemap_data_all()
        else:
            payload = nodemap.persistent_data(index)
        ui.write(payload)
    elif opts['dump_disk']:
        # emit the persisted nodemap bytes verbatim
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, payload = persisted
            ui.write(payload[:])
    elif opts['check']:
        # validate the persisted data against the live index
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, payload = persisted
            return nodemap.check_data(ui, changelog.index, payload)
    elif opts['metadata']:
        # display the docket (metadata header) of the persisted nodemap
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, payload = persisted
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2480 2487
2481 2488
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Turn a full hex node string into binary, rejecting anything else.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by index
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # parents can only be recorded for changesets we have
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            # grammar fix: previously read "cannot used"
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # indices are positions in the full marker list, so we must
            # iterate everything even when displaying a subset
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2631 2638
2632 2639
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    # default to the working directory when no revision is given
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dst, src in copymap.items():
        ui.write(b'%s -> %s\n' % (src, dst))
2645 2652
2646 2653
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # This function was previously (mis)named debugp1copies, which shadowed
    # the real debugp1copies defined just above at module level. The command
    # registration was unaffected (@command captures the function object),
    # but the module attribute pointed at the wrong implementation.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2659 2666
2660 2667
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) matching ``path`` whose dirstate entry state
        # is one of the characters in ``acceptable``.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # outside the repository: nothing to complete
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # make the spec relative to the repository root
        spec = spec[len(rootdir) :]
        # dirstate paths always use '/'; translate on platforms where the
        # OS separator differs
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, complete only up to the next separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the options;
    # with no filter options, all of n/m/a/r are acceptable
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2729 2736
2730 2737
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    # resolve the copy map, then emit it in deterministic (sorted) order
    copymap = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2744 2751
2745 2752
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        # always release the peer's resources, even if a probe call failed
        peer.close()
2769 2776
2770 2777
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool takes precedence; route it through ui.forcemerge like the
        # real merge machinery does
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # silence the picker's own output unless --debug was given
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2855 2862
2856 2863
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    # Open a peer for the target repo/URL; must be closed even on error,
    # hence the try/finally below.
    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Five-argument form: conditional set (KEY OLD NEW).
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # pushkey returns truthy on success; command exit code is the
            # inverse (0 == success).
            return not r
        else:
            # Two-argument form: list all keys in the namespace.
            for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
2892 2899
2893 2900
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent-vector (pvec) encodings of two revisions

    Prints both pvecs, their depths, and the delta / hamming distance /
    topological relation between them.  The relation is ``=`` (equal),
    ``>`` (a descends from b), ``<`` (b descends from a) or ``|``
    (divergent).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive fallback: the four comparisons above are expected to be
        # exhaustive, but the original code left `rel` unbound when none
        # matched, which would raise UnboundLocalError in the write below
        # instead of producing output.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2920 2927
2921 2928
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None tells dirstate.rebuild() to rebuild entries for every file.
        changedfiles = None
        # See command doc for what minimal does.
        # NOTE: opts here still has native-str keys (no byteskwargs
        # conversion above), hence the 'minimal' (not b'minimal') lookup.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files in the manifest but missing from the dirstate...
            manifestonly = manifestfiles - dirstatefiles
            # ...plus dirstate-only files that are not pending adds.
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2969 2976
2970 2977
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Normalize options to bytes keys, then delegate the actual rebuild
    # work to the repair module.
    byte_opts = pycompat.byteskwargs(opts)
    only_data = byte_opts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
2987 2994
2988 2995
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    # Resolve the context to inspect (-r/--rev, default: wdir parent) and
    # walk the files selected by the given patterns.
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
            continue
        # renamed is a (source path, source filenode) pair.
        source, srcnode = renamed
        ui.write(_(b"%s renamed from %s:%s\n") % (relpath, source, hex(srcnode)))
3008 3015
3009 3016
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # Emit one requirement per line, sorted for stable, comparable output.
    requirements = sorted(repo.requirements)
    for name in requirements:
        ui.write(b"%s\n" % name)
3015 3022
3016 3023
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With -d/--dump, print one line of raw index data per revision instead
    of the aggregated statistics.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # Raw dump mode: one line per revision, then return early.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start   end deltastart base   p1   p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": the revision is its own base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the running set of head revisions: parents of the
            # current rev stop being heads, the rev itself becomes one.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # Cumulative raw size over cumulative stored size so far.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Statistics mode: first decode the revlog format version and flags.
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each entry is a [min, max, total]
    # accumulator, updated in place by addsize() below.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into the [min, max, total] accumulator `l` in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # No delta parent: this is either an empty text or a full
            # snapshot (chain of length 0).
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta revision: extend the chain stats of its delta parent.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # Intermediate snapshot (delta against another snapshot).
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Plain delta: classify by which revision it deltas against.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the stored chunk identifies its compression type.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # From here on, index [2] of each accumulator becomes the *average*.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string templates sized to the widest value they will print.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Decimal format wide enough for `max`.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # Decimal-plus-percentage format wide enough for `max`.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for basepcfmtstr.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags  : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions     : ' + fmt2 % numrevs)
    ui.writenoi18n(b'    merges    : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b'    normal    : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions     : ' + fmt2 % numrevs)
    ui.writenoi18n(b'    empty     : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b'                   text  : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b'                   delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b'    snapshot  : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b'      lvl-%-3d :       ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b'    snapshot  : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b'      lvl-%-3d :       ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b'    deltas    : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Human-readable label for a chunk compression-type byte.
        if chunktype == b'empty':
            return b'    %s     : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b'    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return b'    0x%s      : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks        : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size   : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length  : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length  : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach   : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg)     : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg)    : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b'    level-%-3d (min/max/avg)          : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg)             : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev  : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b'    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b'    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b'    other            : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3371 3378
3372 3379
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    Two output layouts are supported via -f/--format (0 or 1); both gain
    extra columns with --verbose.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full 40-char hashes with --debug, short 12-char hashes otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Column headers, matched to the per-row formats written below.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the lookup fails (e.g. on a
                # damaged index); this command is a debugging aid and should
                # keep dumping rather than abort.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not node hashes.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3486 3493
3487 3494
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset compilation pipeline: each stage transforms the tree
    # produced by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Decide which stage trees get printed: `showalways` unconditionally,
    # `showchanged` only when the stage actually changed the tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff the
        # resulting revision lists; exit 1 on any difference.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render the mismatch as a unified-style +/- diff of revisions.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3619 3626
3620 3627
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive ways of capturing
    # the server's I/O stream.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Serve the wire protocol over this process's stdio; blocks until the
    # client disconnects.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3669 3676
3670 3677
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # Default the second parent to the null revision when REV2 is omitted.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3698 3705
3699 3706
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision, not
    # a file path: shift it over.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fixed copy-paste from debugdata: report errors under this
            # command's own name.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap filelog-style objects down to the underlying revlog, which
    # carries the sidedata() API.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for deterministic output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3726 3733
3727 3734
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        # Fall back to the repo's configured 'default' path.
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only schemes with a well-known TLS/SSH port are supported.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is intentionally disabled (CERT_NONE): we
    # only want the peer's certificate bytes to feed the Windows chain
    # builder, not to validate the connection here.
    # NOTE(review): ssl.wrap_socket is deprecated (removed in Python 3.12);
    # consider SSLContext.wrap_socket when this code is next touched.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass only checks; second pass (build=True) asks Windows to
        # fetch any missing intermediates/root via Windows Update.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3799 3806
3800 3807
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the on-disk backup bundles from .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # These two keys are consumed by the incoming-style machinery below.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honouring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent we no longer have; skip it.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Stay quiet while computing each bundle's incoming changesets.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # Requested changeset restored; stop scanning the
                        # remaining (older) bundles.
                        break
            else:
                # Listing mode: print the bundle's mtime header, then its
                # changesets (full path only with --verbose).
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    # Compact one-line-per-changeset template.
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # Release temporary bundle-repo resources for this backup.
            cleanupfn()
3941 3948
3942 3949
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print each subrepository entry (path, source, revision) of the
    # requested revision, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3954 3961
3955 3962
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interpreter's namespace with the ui and (possibly None) repo.
    namespace = {'ui': ui, 'repo': repo}
    code.interact(local=namespace)
3971 3978
3972 3979
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so repeated work is avoided.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        succssets = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in succssets:
            # One indented line per successors set; empty sets print as
            # a bare newline.
            if succsset:
                ui.write(b' ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
4027 4034
4028 4035
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: report only what is already cached; this
        # debug command must not populate the cache as a side effect.
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            tagsnodedisplay = hex(tagsnode)
            # Cached .hgtags filenode that the filelog doesn't know about.
            if not flog.hasnode(tagsnode):
                tagsnodedisplay += b' (unknown node)'
        elif tagsnode is None:
            # No cache entry at all for this revision.
            tagsnodedisplay = b'missing'
        else:
            # Entry exists but is falsy (not None) -> reported as invalid.
            tagsnodedisplay = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4047 4054
4048 4055
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev needs a repository even though the command itself doesn't.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            # Empty keys are meaningless and 'ui' is a reserved resource
            # name; both are rejected via the shared ValueError path.
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree when the
        # configured aliases actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4112 4119
4113 4120
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    # getpass() can return None; substitute a placeholder so the
    # %s formatting below never sees None.
    response = ui.getpass(prompt)
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4128 4135
4129 4136
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the prompt machinery returned, verbatim.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4142 4149
4143 4150
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the wlock and the store lock while rewriting caches.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4149 4156
4150 4157
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Thin CLI wrapper: all the actual work lives in the upgrade module.
    # optimize arrives as a list (repeatable -o) and is deduplicated here.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4200 4207
4201 4208
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Only rewrite path separators for display when ui.slash is set on a
    # platform whose native separator is not '/'.
    # (renamed the local previously called `abs`, which shadowed the builtin)
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypath = util.normpath
    else:
        displaypath = lambda fn: fn
    # Column widths are sized to the longest repo-relative and cwd-relative
    # names; generators avoid building throwaway lists.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        line = fmt % (
            fname,
            displaypath(repo.pathto(fname)),
            m.exact(fname) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4228 4235
4229 4236
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = b''
        if entry.get(b'divergentnodes'):
            # Render each divergent changeset as "<hex> (<phase>)",
            # space-separated, with a trailing space before the reason.
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in entry[b'divergentnodes']
                )
                + b' '
            )
        # One line per instability: "<kind>: [divergent nodes ]<reason> <node>"
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4247 4254
4248 4255
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise argument passing through the wire protocol by invoking the
    # remote's debugwireargs command and echoing what comes back.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only the test options remain.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        # Forward only options that were actually set.
        args = {}
        for k, v in pycompat.iteritems(opts):
            if v:
                args[k] = v
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        # Mismatched results indicate stream corruption between commands.
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4279 4286
4280 4287
def _parsewirelangblocks(fh):
    # Split the debugwireproto mini language into (action, lines) blocks.
    # A block is a non-indented action line followed by indented payload
    # lines; deeper-indented lines are folded into the previous payload
    # line (continuation), blank lines and '#' comments are skipped.
    current_action = None
    current_lines = []
    prev_indent = 0

    for raw in fh:
        stripped = raw.rstrip()
        # Skip blanks and comment lines entirely.
        if not stripped or stripped.startswith(b'#'):
            continue

        if not stripped.startswith(b' '):
            # A new action line: emit the block collected so far.
            if current_action:
                yield current_action, current_lines
            current_action = stripped
            current_lines = []
            prev_indent = 0
            continue

        # From here on we are looking at an indented payload line.
        if not current_action:
            raise error.Abort(_(b'indented line outside of block'))

        depth = len(stripped) - len(stripped.lstrip())

        # Deeper indentation than the previous payload line means
        # continuation: concatenate onto that line (without its indent).
        if depth > prev_indent and current_lines:
            current_lines[-1] += stripped.lstrip()
        else:
            current_lines.append(stripped)
            prev_indent = depth

    # Emit the trailing block, if any.
    if current_action:
        yield current_action, current_lines
4321 4328
4322 4329
4323 4330 @command(
4324 4331 b'debugwireproto',
4325 4332 [
4326 4333 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4327 4334 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4328 4335 (
4329 4336 b'',
4330 4337 b'noreadstderr',
4331 4338 False,
4332 4339 _(b'do not read from stderr of the remote'),
4333 4340 ),
4334 4341 (
4335 4342 b'',
4336 4343 b'nologhandshake',
4337 4344 False,
4338 4345 _(b'do not log I/O related to the peer handshake'),
4339 4346 ),
4340 4347 ]
4341 4348 + cmdutil.remoteopts,
4342 4349 _(b'[PATH]'),
4343 4350 optionalrepo=True,
4344 4351 )
4345 4352 def debugwireproto(ui, repo, path=None, **opts):
4346 4353 """send wire protocol commands to a server
4347 4354
4348 4355 This command can be used to issue wire protocol commands to remote
4349 4356 peers and to debug the raw data being exchanged.
4350 4357
4351 4358 ``--localssh`` will start an SSH server against the current repository
4352 4359 and connect to that. By default, the connection will perform a handshake
4353 4360 and establish an appropriate peer instance.
4354 4361
4355 4362 ``--peer`` can be used to bypass the handshake protocol and construct a
4356 4363 peer instance using the specified class type. Valid values are ``raw``,
4357 4364 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4358 4365 don't support higher-level command actions.
4359 4366
4360 4367 ``--noreadstderr`` can be used to disable automatic reading from stderr
4361 4368 of the peer (for SSH connections only). Disabling automatic reading of
4362 4369 stderr is useful for making output more deterministic.
4363 4370
4364 4371 Commands are issued via a mini language which is specified via stdin.
4365 4372 The language consists of individual actions to perform. An action is
4366 4373 defined by a block. A block is defined as a line with no leading
4367 4374 space followed by 0 or more lines with leading space. Blocks are
4368 4375 effectively a high-level command with additional metadata.
4369 4376
4370 4377 Lines beginning with ``#`` are ignored.
4371 4378
4372 4379 The following sections denote available actions.
4373 4380
4374 4381 raw
4375 4382 ---
4376 4383
4377 4384 Send raw data to the server.
4378 4385
4379 4386 The block payload contains the raw data to send as one atomic send
4380 4387 operation. The data may not actually be delivered in a single system
4381 4388 call: it depends on the abilities of the transport being used.
4382 4389
4383 4390 Each line in the block is de-indented and concatenated. Then, that
4384 4391 value is evaluated as a Python b'' literal. This allows the use of
4385 4392 backslash escaping, etc.
4386 4393
4387 4394 raw+
4388 4395 ----
4389 4396
4390 4397 Behaves like ``raw`` except flushes output afterwards.
4391 4398
4392 4399 command <X>
4393 4400 -----------
4394 4401
4395 4402 Send a request to run a named command, whose name follows the ``command``
4396 4403 string.
4397 4404
4398 4405 Arguments to the command are defined as lines in this block. The format of
4399 4406 each line is ``<key> <value>``. e.g.::
4400 4407
4401 4408 command listkeys
4402 4409 namespace bookmarks
4403 4410
4404 4411 If the value begins with ``eval:``, it will be interpreted as a Python
4405 4412 literal expression. Otherwise values are interpreted as Python b'' literals.
4406 4413 This allows sending complex types and encoding special byte sequences via
4407 4414 backslash escaping.
4408 4415
4409 4416 The following arguments have special meaning:
4410 4417
4411 4418 ``PUSHFILE``
4412 4419 When defined, the *push* mechanism of the peer will be used instead
4413 4420 of the static request-response mechanism and the content of the
4414 4421 file specified in the value of this argument will be sent as the
4415 4422 command payload.
4416 4423
4417 4424 This can be used to submit a local bundle file to the remote.
4418 4425
4419 4426 batchbegin
4420 4427 ----------
4421 4428
4422 4429 Instruct the peer to begin a batched send.
4423 4430
4424 4431 All ``command`` blocks are queued for execution until the next
4425 4432 ``batchsubmit`` block.
4426 4433
4427 4434 batchsubmit
4428 4435 -----------
4429 4436
4430 4437 Submit previously queued ``command`` blocks as a batch request.
4431 4438
4432 4439 This action MUST be paired with a ``batchbegin`` action.
4433 4440
4434 4441 httprequest <method> <path>
4435 4442 ---------------------------
4436 4443
4437 4444 (HTTP peer only)
4438 4445
4439 4446 Send an HTTP request to the peer.
4440 4447
4441 4448 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4442 4449
4443 4450 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4444 4451 headers to add to the request. e.g. ``Accept: foo``.
4445 4452
4446 4453 The following arguments are special:
4447 4454
4448 4455 ``BODYFILE``
4449 4456 The content of the file defined as the value to this argument will be
4450 4457 transferred verbatim as the HTTP request body.
4451 4458
4452 4459 ``frame <type> <flags> <payload>``
4453 4460 Send a unified protocol frame as part of the request body.
4454 4461
4455 4462 All frames will be collected and sent as the body to the HTTP
4456 4463 request.
4457 4464
4458 4465 close
4459 4466 -----
4460 4467
4461 4468 Close the connection to the server.
4462 4469
4463 4470 flush
4464 4471 -----
4465 4472
4466 4473 Flush data written to the server.
4467 4474
4468 4475 readavailable
4469 4476 -------------
4470 4477
4471 4478 Close the write end of the connection and read all available data from
4472 4479 the server.
4473 4480
4474 4481 If the connection to the server encompasses multiple pipes, we poll both
4475 4482 pipes and read available data.
4476 4483
4477 4484 readline
4478 4485 --------
4479 4486
4480 4487 Read a line of output from the server. If there are multiple output
4481 4488 pipes, reads only the main pipe.
4482 4489
4483 4490 ereadline
4484 4491 ---------
4485 4492
4486 4493 Like ``readline``, but read from the stderr pipe, if available.
4487 4494
4488 4495 read <X>
4489 4496 --------
4490 4497
4491 4498 ``read()`` N bytes from the server's main output pipe.
4492 4499
4493 4500 eread <X>
4494 4501 ---------
4495 4502
4496 4503 ``read()`` N bytes from the server's stderr pipe, if available.
4497 4504
4498 4505 Specifying Unified Frame-Based Protocol Frames
4499 4506 ----------------------------------------------
4500 4507
4501 4508 It is possible to emit a *Unified Frame-Based Protocol* by using special
4502 4509 syntax.
4503 4510
4504 4511 A frame is composed as a type, flags, and payload. These can be parsed
4505 4512 from a string of the form:
4506 4513
4507 4514 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4508 4515
4509 4516 ``request-id`` and ``stream-id`` are integers defining the request and
4510 4517 stream identifiers.
4511 4518
4512 4519 ``type`` can be an integer value for the frame type or the string name
4513 4520 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4514 4521 ``command-name``.
4515 4522
4516 4523 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4517 4524 components. Each component (and there can be just one) can be an integer
4518 4525 or a flag name for stream flags or frame flags, respectively. Values are
4519 4526 resolved to integers and then bitwise OR'd together.
4520 4527
4521 4528 ``payload`` represents the raw frame payload. If it begins with
4522 4529 ``cbor:``, the following string is evaluated as Python code and the
4523 4530 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4524 4531 as a Python byte string literal.
4525 4532 """
4526 4533 opts = pycompat.byteskwargs(opts)
4527 4534
4528 4535 if opts[b'localssh'] and not repo:
4529 4536 raise error.Abort(_(b'--localssh requires a repository'))
4530 4537
4531 4538 if opts[b'peer'] and opts[b'peer'] not in (
4532 4539 b'raw',
4533 4540 b'ssh1',
4534 4541 ):
4535 4542 raise error.Abort(
4536 4543 _(b'invalid value for --peer'),
4537 4544 hint=_(b'valid values are "raw" and "ssh1"'),
4538 4545 )
4539 4546
4540 4547 if path and opts[b'localssh']:
4541 4548 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4542 4549
4543 4550 if ui.interactive():
4544 4551 ui.write(_(b'(waiting for commands on stdin)\n'))
4545 4552
4546 4553 blocks = list(_parsewirelangblocks(ui.fin))
4547 4554
4548 4555 proc = None
4549 4556 stdin = None
4550 4557 stdout = None
4551 4558 stderr = None
4552 4559 opener = None
4553 4560
4554 4561 if opts[b'localssh']:
4555 4562 # We start the SSH server in its own process so there is process
4556 4563 # separation. This prevents a whole class of potential bugs around
4557 4564 # shared state from interfering with server operation.
4558 4565 args = procutil.hgcmd() + [
4559 4566 b'-R',
4560 4567 repo.root,
4561 4568 b'debugserve',
4562 4569 b'--sshstdio',
4563 4570 ]
4564 4571 proc = subprocess.Popen(
4565 4572 pycompat.rapply(procutil.tonativestr, args),
4566 4573 stdin=subprocess.PIPE,
4567 4574 stdout=subprocess.PIPE,
4568 4575 stderr=subprocess.PIPE,
4569 4576 bufsize=0,
4570 4577 )
4571 4578
4572 4579 stdin = proc.stdin
4573 4580 stdout = proc.stdout
4574 4581 stderr = proc.stderr
4575 4582
4576 4583 # We turn the pipes into observers so we can log I/O.
4577 4584 if ui.verbose or opts[b'peer'] == b'raw':
4578 4585 stdin = util.makeloggingfileobject(
4579 4586 ui, proc.stdin, b'i', logdata=True
4580 4587 )
4581 4588 stdout = util.makeloggingfileobject(
4582 4589 ui, proc.stdout, b'o', logdata=True
4583 4590 )
4584 4591 stderr = util.makeloggingfileobject(
4585 4592 ui, proc.stderr, b'e', logdata=True
4586 4593 )
4587 4594
4588 4595 # --localssh also implies the peer connection settings.
4589 4596
4590 4597 url = b'ssh://localserver'
4591 4598 autoreadstderr = not opts[b'noreadstderr']
4592 4599
4593 4600 if opts[b'peer'] == b'ssh1':
4594 4601 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4595 4602 peer = sshpeer.sshv1peer(
4596 4603 ui,
4597 4604 url,
4598 4605 proc,
4599 4606 stdin,
4600 4607 stdout,
4601 4608 stderr,
4602 4609 None,
4603 4610 autoreadstderr=autoreadstderr,
4604 4611 )
4605 4612 elif opts[b'peer'] == b'raw':
4606 4613 ui.write(_(b'using raw connection to peer\n'))
4607 4614 peer = None
4608 4615 else:
4609 4616 ui.write(_(b'creating ssh peer from handshake results\n'))
4610 4617 peer = sshpeer.makepeer(
4611 4618 ui,
4612 4619 url,
4613 4620 proc,
4614 4621 stdin,
4615 4622 stdout,
4616 4623 stderr,
4617 4624 autoreadstderr=autoreadstderr,
4618 4625 )
4619 4626
4620 4627 elif path:
4621 4628 # We bypass hg.peer() so we can proxy the sockets.
4622 4629 # TODO consider not doing this because we skip
4623 4630 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4624 4631 u = urlutil.url(path)
4625 4632 if u.scheme != b'http':
4626 4633 raise error.Abort(_(b'only http:// paths are currently supported'))
4627 4634
4628 4635 url, authinfo = u.authinfo()
4629 4636 openerargs = {
4630 4637 'useragent': b'Mercurial debugwireproto',
4631 4638 }
4632 4639
4633 4640 # Turn pipes/sockets into observers so we can log I/O.
4634 4641 if ui.verbose:
4635 4642 openerargs.update(
4636 4643 {
4637 4644 'loggingfh': ui,
4638 4645 'loggingname': b's',
4639 4646 'loggingopts': {
4640 4647 'logdata': True,
4641 4648 'logdataapis': False,
4642 4649 },
4643 4650 }
4644 4651 )
4645 4652
4646 4653 if ui.debugflag:
4647 4654 openerargs['loggingopts']['logdataapis'] = True
4648 4655
4649 4656 # Don't send default headers when in raw mode. This allows us to
4650 4657 # bypass most of the behavior of our URL handling code so we can
4651 4658 # have near complete control over what's sent on the wire.
4652 4659 if opts[b'peer'] == b'raw':
4653 4660 openerargs['sendaccept'] = False
4654 4661
4655 4662 opener = urlmod.opener(ui, authinfo, **openerargs)
4656 4663
4657 4664 if opts[b'peer'] == b'raw':
4658 4665 ui.write(_(b'using raw connection to peer\n'))
4659 4666 peer = None
4660 4667 elif opts[b'peer']:
4661 4668 raise error.Abort(
4662 4669 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4663 4670 )
4664 4671 else:
4665 4672 peer = httppeer.makepeer(ui, path, opener=opener)
4666 4673
4667 4674 # We /could/ populate stdin/stdout with sock.makefile()...
4668 4675 else:
4669 4676 raise error.Abort(_(b'unsupported connection configuration'))
4670 4677
4671 4678 batchedcommands = None
4672 4679
4673 4680 # Now perform actions based on the parsed wire language instructions.
4674 4681 for action, lines in blocks:
4675 4682 if action in (b'raw', b'raw+'):
4676 4683 if not stdin:
4677 4684 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4678 4685
4679 4686 # Concatenate the data together.
4680 4687 data = b''.join(l.lstrip() for l in lines)
4681 4688 data = stringutil.unescapestr(data)
4682 4689 stdin.write(data)
4683 4690
4684 4691 if action == b'raw+':
4685 4692 stdin.flush()
4686 4693 elif action == b'flush':
4687 4694 if not stdin:
4688 4695 raise error.Abort(_(b'cannot call flush on this peer'))
4689 4696 stdin.flush()
4690 4697 elif action.startswith(b'command'):
4691 4698 if not peer:
4692 4699 raise error.Abort(
4693 4700 _(
4694 4701 b'cannot send commands unless peer instance '
4695 4702 b'is available'
4696 4703 )
4697 4704 )
4698 4705
4699 4706 command = action.split(b' ', 1)[1]
4700 4707
4701 4708 args = {}
4702 4709 for line in lines:
4703 4710 # We need to allow empty values.
4704 4711 fields = line.lstrip().split(b' ', 1)
4705 4712 if len(fields) == 1:
4706 4713 key = fields[0]
4707 4714 value = b''
4708 4715 else:
4709 4716 key, value = fields
4710 4717
4711 4718 if value.startswith(b'eval:'):
4712 4719 value = stringutil.evalpythonliteral(value[5:])
4713 4720 else:
4714 4721 value = stringutil.unescapestr(value)
4715 4722
4716 4723 args[key] = value
4717 4724
4718 4725 if batchedcommands is not None:
4719 4726 batchedcommands.append((command, args))
4720 4727 continue
4721 4728
4722 4729 ui.status(_(b'sending %s command\n') % command)
4723 4730
4724 4731 if b'PUSHFILE' in args:
4725 4732 with open(args[b'PUSHFILE'], 'rb') as fh:
4726 4733 del args[b'PUSHFILE']
4727 4734 res, output = peer._callpush(
4728 4735 command, fh, **pycompat.strkwargs(args)
4729 4736 )
4730 4737 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4731 4738 ui.status(
4732 4739 _(b'remote output: %s\n') % stringutil.escapestr(output)
4733 4740 )
4734 4741 else:
4735 4742 with peer.commandexecutor() as e:
4736 4743 res = e.callcommand(command, args).result()
4737 4744
4738 4745 ui.status(
4739 4746 _(b'response: %s\n')
4740 4747 % stringutil.pprint(res, bprefix=True, indent=2)
4741 4748 )
4742 4749
4743 4750 elif action == b'batchbegin':
4744 4751 if batchedcommands is not None:
4745 4752 raise error.Abort(_(b'nested batchbegin not allowed'))
4746 4753
4747 4754 batchedcommands = []
4748 4755 elif action == b'batchsubmit':
4749 4756 # There is a batching API we could go through. But it would be
4750 4757 # difficult to normalize requests into function calls. It is easier
4751 4758 # to bypass this layer and normalize to commands + args.
4752 4759 ui.status(
4753 4760 _(b'sending batch with %d sub-commands\n')
4754 4761 % len(batchedcommands)
4755 4762 )
4756 4763 assert peer is not None
4757 4764 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4758 4765 ui.status(
4759 4766 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4760 4767 )
4761 4768
4762 4769 batchedcommands = None
4763 4770
4764 4771 elif action.startswith(b'httprequest '):
4765 4772 if not opener:
4766 4773 raise error.Abort(
4767 4774 _(b'cannot use httprequest without an HTTP peer')
4768 4775 )
4769 4776
4770 4777 request = action.split(b' ', 2)
4771 4778 if len(request) != 3:
4772 4779 raise error.Abort(
4773 4780 _(
4774 4781 b'invalid httprequest: expected format is '
4775 4782 b'"httprequest <method> <path>'
4776 4783 )
4777 4784 )
4778 4785
4779 4786 method, httppath = request[1:]
4780 4787 headers = {}
4781 4788 body = None
4782 4789 frames = []
4783 4790 for line in lines:
4784 4791 line = line.lstrip()
4785 4792 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4786 4793 if m:
4787 4794 # Headers need to use native strings.
4788 4795 key = pycompat.strurl(m.group(1))
4789 4796 value = pycompat.strurl(m.group(2))
4790 4797 headers[key] = value
4791 4798 continue
4792 4799
4793 4800 if line.startswith(b'BODYFILE '):
4794 4801 with open(line.split(b' ', 1), b'rb') as fh:
4795 4802 body = fh.read()
4796 4803 elif line.startswith(b'frame '):
4797 4804 frame = wireprotoframing.makeframefromhumanstring(
4798 4805 line[len(b'frame ') :]
4799 4806 )
4800 4807
4801 4808 frames.append(frame)
4802 4809 else:
4803 4810 raise error.Abort(
4804 4811 _(b'unknown argument to httprequest: %s') % line
4805 4812 )
4806 4813
4807 4814 url = path + httppath
4808 4815
4809 4816 if frames:
4810 4817 body = b''.join(bytes(f) for f in frames)
4811 4818
4812 4819 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4813 4820
4814 4821 # urllib.Request insists on using has_data() as a proxy for
4815 4822 # determining the request method. Override that to use our
4816 4823 # explicitly requested method.
4817 4824 req.get_method = lambda: pycompat.sysstr(method)
4818 4825
4819 4826 try:
4820 4827 res = opener.open(req)
4821 4828 body = res.read()
4822 4829 except util.urlerr.urlerror as e:
4823 4830 # read() method must be called, but only exists in Python 2
4824 4831 getattr(e, 'read', lambda: None)()
4825 4832 continue
4826 4833
4827 4834 ct = res.headers.get('Content-Type')
4828 4835 if ct == 'application/mercurial-cbor':
4829 4836 ui.write(
4830 4837 _(b'cbor> %s\n')
4831 4838 % stringutil.pprint(
4832 4839 cborutil.decodeall(body), bprefix=True, indent=2
4833 4840 )
4834 4841 )
4835 4842
4836 4843 elif action == b'close':
4837 4844 assert peer is not None
4838 4845 peer.close()
4839 4846 elif action == b'readavailable':
4840 4847 if not stdout or not stderr:
4841 4848 raise error.Abort(
4842 4849 _(b'readavailable not available on this peer')
4843 4850 )
4844 4851
4845 4852 stdin.close()
4846 4853 stdout.read()
4847 4854 stderr.read()
4848 4855
4849 4856 elif action == b'readline':
4850 4857 if not stdout:
4851 4858 raise error.Abort(_(b'readline not available on this peer'))
4852 4859 stdout.readline()
4853 4860 elif action == b'ereadline':
4854 4861 if not stderr:
4855 4862 raise error.Abort(_(b'ereadline not available on this peer'))
4856 4863 stderr.readline()
4857 4864 elif action.startswith(b'read '):
4858 4865 count = int(action.split(b' ', 1)[1])
4859 4866 if not stdout:
4860 4867 raise error.Abort(_(b'read not available on this peer'))
4861 4868 stdout.read(count)
4862 4869 elif action.startswith(b'eread '):
4863 4870 count = int(action.split(b' ', 1)[1])
4864 4871 if not stderr:
4865 4872 raise error.Abort(_(b'eread not available on this peer'))
4866 4873 stderr.read(count)
4867 4874 else:
4868 4875 raise error.Abort(_(b'unknown action: %s') % action)
4869 4876
4870 4877 if batchedcommands is not None:
4871 4878 raise error.Abort(_(b'unclosed "batchbegin" request'))
4872 4879
4873 4880 if peer:
4874 4881 peer.close()
4875 4882
4876 4883 if proc:
4877 4884 proc.kill()
@@ -1,447 +1,447 b''
1 1 Show all commands except debug commands
2 2 $ hg debugcomplete
3 3 abort
4 4 add
5 5 addremove
6 6 annotate
7 7 archive
8 8 backout
9 9 bisect
10 10 bookmarks
11 11 branch
12 12 branches
13 13 bundle
14 14 cat
15 15 clone
16 16 commit
17 17 config
18 18 continue
19 19 copy
20 20 diff
21 21 export
22 22 files
23 23 forget
24 24 graft
25 25 grep
26 26 heads
27 27 help
28 28 identify
29 29 import
30 30 incoming
31 31 init
32 32 locate
33 33 log
34 34 manifest
35 35 merge
36 36 outgoing
37 37 parents
38 38 paths
39 39 phase
40 40 pull
41 41 purge
42 42 push
43 43 recover
44 44 remove
45 45 rename
46 46 resolve
47 47 revert
48 48 rollback
49 49 root
50 50 serve
51 51 shelve
52 52 status
53 53 summary
54 54 tag
55 55 tags
56 56 tip
57 57 unbundle
58 58 unshelve
59 59 update
60 60 verify
61 61 version
62 62
63 63 Show all commands that start with "a"
64 64 $ hg debugcomplete a
65 65 abort
66 66 add
67 67 addremove
68 68 annotate
69 69 archive
70 70
71 71 Do not show debug commands if there are other candidates
72 72 $ hg debugcomplete d
73 73 diff
74 74
75 75 Show debug commands if there are no other candidates
76 76 $ hg debugcomplete debug
77 77 debug-repair-issue6528
78 78 debugancestor
79 79 debugantivirusrunning
80 80 debugapplystreamclonebundle
81 81 debugbackupbundle
82 82 debugbuilddag
83 83 debugbundle
84 84 debugcapabilities
85 85 debugchangedfiles
86 86 debugcheckstate
87 87 debugcolor
88 88 debugcommands
89 89 debugcomplete
90 90 debugconfig
91 91 debugcreatestreamclonebundle
92 92 debugdag
93 93 debugdata
94 94 debugdate
95 95 debugdeltachain
96 96 debugdirstate
97 97 debugdirstateignorepatternshash
98 98 debugdiscovery
99 99 debugdownload
100 100 debugextensions
101 101 debugfileset
102 102 debugformat
103 103 debugfsinfo
104 104 debuggetbundle
105 105 debugignore
106 106 debugindex
107 107 debugindexdot
108 108 debugindexstats
109 109 debuginstall
110 110 debugknown
111 111 debuglabelcomplete
112 112 debuglocks
113 113 debugmanifestfulltextcache
114 114 debugmergestate
115 115 debugnamecomplete
116 116 debugnodemap
117 117 debugobsolete
118 118 debugp1copies
119 119 debugp2copies
120 120 debugpathcomplete
121 121 debugpathcopies
122 122 debugpeer
123 123 debugpickmergetool
124 124 debugpushkey
125 125 debugpvec
126 126 debugrebuilddirstate
127 127 debugrebuildfncache
128 128 debugrename
129 129 debugrequires
130 130 debugrevlog
131 131 debugrevlogindex
132 132 debugrevspec
133 133 debugserve
134 134 debugsetparents
135 135 debugshell
136 136 debugsidedata
137 137 debugssl
138 138 debugstrip
139 139 debugsub
140 140 debugsuccessorssets
141 141 debugtagscache
142 142 debugtemplate
143 143 debuguigetpass
144 144 debuguiprompt
145 145 debugupdatecaches
146 146 debugupgraderepo
147 147 debugwalk
148 148 debugwhyunstable
149 149 debugwireargs
150 150 debugwireproto
151 151
152 152 Do not show the alias of a debug command if there are other candidates
153 153 (this should hide rawcommit)
154 154 $ hg debugcomplete r
155 155 recover
156 156 remove
157 157 rename
158 158 resolve
159 159 revert
160 160 rollback
161 161 root
162 162 Show the alias of a debug command if there are no other candidates
163 163 $ hg debugcomplete rawc
164 164
165 165
166 166 Show the global options
167 167 $ hg debugcomplete --options | sort
168 168 --color
169 169 --config
170 170 --cwd
171 171 --debug
172 172 --debugger
173 173 --encoding
174 174 --encodingmode
175 175 --help
176 176 --hidden
177 177 --noninteractive
178 178 --pager
179 179 --profile
180 180 --quiet
181 181 --repository
182 182 --time
183 183 --traceback
184 184 --verbose
185 185 --version
186 186 -R
187 187 -h
188 188 -q
189 189 -v
190 190 -y
191 191
192 192 Show the options for the "serve" command
193 193 $ hg debugcomplete --options serve | sort
194 194 --accesslog
195 195 --address
196 196 --certificate
197 197 --cmdserver
198 198 --color
199 199 --config
200 200 --cwd
201 201 --daemon
202 202 --daemon-postexec
203 203 --debug
204 204 --debugger
205 205 --encoding
206 206 --encodingmode
207 207 --errorlog
208 208 --help
209 209 --hidden
210 210 --ipv6
211 211 --name
212 212 --noninteractive
213 213 --pager
214 214 --pid-file
215 215 --port
216 216 --prefix
217 217 --print-url
218 218 --profile
219 219 --quiet
220 220 --repository
221 221 --stdio
222 222 --style
223 223 --subrepos
224 224 --templates
225 225 --time
226 226 --traceback
227 227 --verbose
228 228 --version
229 229 --web-conf
230 230 -6
231 231 -A
232 232 -E
233 233 -R
234 234 -S
235 235 -a
236 236 -d
237 237 -h
238 238 -n
239 239 -p
240 240 -q
241 241 -t
242 242 -v
243 243 -y
244 244
245 245 Show an error if we use --options with an ambiguous abbreviation
246 246 $ hg debugcomplete --options s
247 247 hg: command 's' is ambiguous:
248 248 serve shelve showconfig status summary
249 249 [10]
250 250
251 251 Show all commands + options
252 252 $ hg debugcommands
253 253 abort: dry-run
254 254 add: include, exclude, subrepos, dry-run
255 255 addremove: similarity, subrepos, include, exclude, dry-run
256 256 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
257 257 archive: no-decode, prefix, rev, type, subrepos, include, exclude
258 258 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
259 259 bisect: reset, good, bad, skip, extend, command, noupdate
260 260 bookmarks: force, rev, delete, rename, inactive, list, template
261 261 branch: force, clean, rev
262 262 branches: active, closed, rev, template
263 263 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
264 264 cat: output, rev, decode, include, exclude, template
265 265 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
266 266 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
267 267 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
268 268 continue: dry-run
269 269 copy: forget, after, at-rev, force, include, exclude, dry-run
270 270 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
271 271 debugancestor:
272 272 debugantivirusrunning:
273 273 debugapplystreamclonebundle:
274 274 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
275 debugbuilddag: mergeable-file, overwritten-file, new-file
275 debugbuilddag: mergeable-file, overwritten-file, new-file, from-existing
276 276 debugbundle: all, part-type, spec
277 277 debugcapabilities:
278 278 debugchangedfiles: compute
279 279 debugcheckstate:
280 280 debugcolor: style
281 281 debugcommands:
282 282 debugcomplete: options
283 283 debugcreatestreamclonebundle:
284 284 debugdag: tags, branches, dots, spaces
285 285 debugdata: changelog, manifest, dir
286 286 debugdate: extended
287 287 debugdeltachain: changelog, manifest, dir, template
288 288 debugdirstateignorepatternshash:
289 289 debugdirstate: nodates, dates, datesort, all
290 290 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
291 291 debugdownload: output
292 292 debugextensions: template
293 293 debugfileset: rev, all-files, show-matcher, show-stage
294 294 debugformat: template
295 295 debugfsinfo:
296 296 debuggetbundle: head, common, type
297 297 debugignore:
298 298 debugindex: changelog, manifest, dir, template
299 299 debugindexdot: changelog, manifest, dir
300 300 debugindexstats:
301 301 debuginstall: template
302 302 debugknown:
303 303 debuglabelcomplete:
304 304 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
305 305 debugmanifestfulltextcache: clear, add
306 306 debugmergestate: style, template
307 307 debugnamecomplete:
308 308 debugnodemap: dump-new, dump-disk, check, metadata
309 309 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
310 310 debugp1copies: rev
311 311 debugp2copies: rev
312 312 debugpathcomplete: full, normal, added, removed
313 313 debugpathcopies: include, exclude
314 314 debugpeer:
315 315 debugpickmergetool: rev, changedelete, include, exclude, tool
316 316 debugpushkey:
317 317 debugpvec:
318 318 debugrebuilddirstate: rev, minimal
319 319 debugrebuildfncache: only-data
320 320 debugrename: rev
321 321 debugrequires:
322 322 debugrevlog: changelog, manifest, dir, dump
323 323 debugrevlogindex: changelog, manifest, dir, format
324 324 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
325 325 debugserve: sshstdio, logiofd, logiofile
326 326 debugsetparents:
327 327 debugshell:
328 328 debugsidedata: changelog, manifest, dir
329 329 debugssl:
330 330 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
331 331 debugsub: rev
332 332 debugsuccessorssets: closest
333 333 debugtagscache:
334 334 debugtemplate: rev, define
335 335 debuguigetpass: prompt
336 336 debuguiprompt: prompt
337 337 debugupdatecaches:
338 338 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
339 339 debugwalk: include, exclude
340 340 debugwhyunstable:
341 341 debugwireargs: three, four, five, ssh, remotecmd, insecure
342 342 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
343 343 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
344 344 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
345 345 files: rev, print0, include, exclude, template, subrepos
346 346 forget: interactive, include, exclude, dry-run
347 347 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
348 348 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
349 349 heads: rev, topo, active, closed, style, template
350 350 help: extension, command, keyword, system
351 351 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
352 352 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
353 353 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
354 354 init: ssh, remotecmd, insecure
355 355 locate: rev, print0, fullpath, include, exclude
356 356 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
357 357 manifest: rev, all, template
358 358 merge: force, rev, preview, abort, tool
359 359 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
360 360 parents: rev, style, template
361 361 paths: template
362 362 phase: public, draft, secret, force, rev
363 363 pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
364 364 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
365 365 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
366 366 recover: verify
367 367 remove: after, force, subrepos, include, exclude, dry-run
368 368 rename: forget, after, at-rev, force, include, exclude, dry-run
369 369 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
370 370 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
371 371 rollback: dry-run, force
372 372 root: template
373 373 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
374 374 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
375 375 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
376 376 summary: remote
377 377 tag: force, local, rev, remove, edit, message, date, user
378 378 tags: template
379 379 tip: patch, git, style, template
380 380 unbundle: update
381 381 unshelve: abort, continue, interactive, keep, name, tool, date
382 382 update: clean, check, merge, date, rev, tool
383 383 verify: full
384 384 version: template
385 385
386 386 $ hg init a
387 387 $ cd a
388 388 $ echo fee > fee
389 389 $ hg ci -q -Amfee
390 390 $ hg tag fee
391 391 $ mkdir fie
392 392 $ echo dead > fie/dead
393 393 $ echo live > fie/live
394 394 $ hg bookmark fo
395 395 $ hg branch -q fie
396 396 $ hg ci -q -Amfie
397 397 $ echo fo > fo
398 398 $ hg branch -qf default
399 399 $ hg ci -q -Amfo
400 400 $ echo Fum > Fum
401 401 $ hg ci -q -AmFum
402 402 $ hg bookmark Fum
403 403
404 404 Test debugpathcomplete
405 405
406 406 $ hg debugpathcomplete f
407 407 fee
408 408 fie
409 409 fo
410 410 $ hg debugpathcomplete -f f
411 411 fee
412 412 fie/dead
413 413 fie/live
414 414 fo
415 415
416 416 $ hg rm Fum
417 417 $ hg debugpathcomplete -r F
418 418 Fum
419 419
420 420 Test debugnamecomplete
421 421
422 422 $ hg debugnamecomplete
423 423 Fum
424 424 default
425 425 fee
426 426 fie
427 427 fo
428 428 tip
429 429 $ hg debugnamecomplete f
430 430 fee
431 431 fie
432 432 fo
433 433
434 434 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
435 435 used for completions in some shells.
436 436
437 437 $ hg debuglabelcomplete
438 438 Fum
439 439 default
440 440 fee
441 441 fie
442 442 fo
443 443 tip
444 444 $ hg debuglabelcomplete f
445 445 fee
446 446 fie
447 447 fo
General Comments 0
You need to be logged in to leave comments. Login now