cli: make debugnodemap capable of inspecting an arbitrary nodemap...
Arseniy Alekseyev
r51402:1b73868d default
@@ -1,4808 +1,4816 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 getattr,
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 bundlerepo,
42 42 changegroup,
43 43 cmdutil,
44 44 color,
45 45 context,
46 46 copies,
47 47 dagparser,
48 48 dirstateutils,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 filelog,
53 54 filemerge,
54 55 filesetlang,
55 56 formatter,
56 57 hg,
57 58 httppeer,
58 59 localrepo,
59 60 lock as lockmod,
60 61 logcmdutil,
62 manifest,
61 63 mergestate as mergestatemod,
62 64 metadata,
63 65 obsolete,
64 66 obsutil,
65 67 pathutil,
66 68 phases,
67 69 policy,
68 70 pvec,
69 71 pycompat,
70 72 registrar,
71 73 repair,
72 74 repoview,
73 75 requirements,
74 76 revlog,
75 77 revset,
76 78 revsetlang,
77 79 scmutil,
78 80 setdiscovery,
79 81 simplemerge,
80 82 sshpeer,
81 83 sslutil,
82 84 streamclone,
83 85 strip,
84 86 tags as tagsmod,
85 87 templater,
86 88 treediscovery,
87 89 upgrade,
88 90 url as urlmod,
89 91 util,
90 92 verify,
91 93 vfs as vfsmod,
92 94 wireprotoframing,
93 95 wireprotoserver,
94 96 )
95 97 from .interfaces import repository
96 98 from .stabletailgraph import stabletailsort
97 99 from .utils import (
98 100 cborutil,
99 101 compression,
100 102 dateutil,
101 103 procutil,
102 104 stringutil,
103 105 urlutil,
104 106 )
105 107
106 108 from .revlogutils import (
107 109 constants as revlog_constants,
108 110 debug as revlog_debug,
109 111 deltas as deltautil,
110 112 nodemap,
111 113 rewrite,
112 114 sidedata,
113 115 )
114 116
115 117 release = lockmod.release
116 118
117 119 table = {}
118 120 table.update(strip.command._table)
119 121 command = registrar.command(table)
120 122
121 123
122 124 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
123 125 def debugancestor(ui, repo, *args):
124 126 """find the ancestor revision of two revisions in a given index"""
125 127 if len(args) == 3:
126 128 index, rev1, rev2 = args
127 129 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
128 130 lookup = r.lookup
129 131 elif len(args) == 2:
130 132 if not repo:
131 133 raise error.Abort(
132 134 _(b'there is no Mercurial repository here (.hg not found)')
133 135 )
134 136 rev1, rev2 = args
135 137 r = repo.changelog
136 138 lookup = repo.lookup
137 139 else:
138 140 raise error.Abort(_(b'either two or three arguments required'))
139 141 a = r.ancestor(lookup(rev1), lookup(rev2))
140 142 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
141 143
142 144
143 145 @command(b'debugantivirusrunning', [])
144 146 def debugantivirusrunning(ui, repo):
145 147 """attempt to trigger an antivirus scanner to see if one is active"""
146 148 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
147 149 f.write(
148 150 util.b85decode(
149 151 # This is a base85-armored version of the EICAR test file. See
150 152 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
151 153 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
152 154 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
153 155 )
154 156 )
155 157 # Give an AV engine time to scan the file.
156 158 time.sleep(2)
157 159 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
158 160
159 161
160 162 @command(b'debugapplystreamclonebundle', [], b'FILE')
161 163 def debugapplystreamclonebundle(ui, repo, fname):
162 164 """apply a stream clone bundle file"""
163 165 f = hg.openpath(ui, fname)
164 166 gen = exchange.readbundle(ui, f, fname)
165 167 gen.apply(repo)
166 168
167 169
168 170 @command(
169 171 b'debugbuilddag',
170 172 [
171 173 (
172 174 b'm',
173 175 b'mergeable-file',
174 176 None,
175 177 _(b'add single file mergeable changes'),
176 178 ),
177 179 (
178 180 b'o',
179 181 b'overwritten-file',
180 182 None,
181 183 _(b'add single file all revs overwrite'),
182 184 ),
183 185 (b'n', b'new-file', None, _(b'add new file at each rev')),
184 186 (
185 187 b'',
186 188 b'from-existing',
187 189 None,
188 190 _(b'continue from a non-empty repository'),
189 191 ),
190 192 ],
191 193 _(b'[OPTION]... [TEXT]'),
192 194 )
193 195 def debugbuilddag(
194 196 ui,
195 197 repo,
196 198 text=None,
197 199 mergeable_file=False,
198 200 overwritten_file=False,
199 201 new_file=False,
200 202 from_existing=False,
201 203 ):
202 204 """builds a repo with a given DAG from scratch in the current empty repo
203 205
204 206 The description of the DAG is read from stdin if not given on the
205 207 command line.
206 208
207 209 Elements:
208 210
209 211 - "+n" is a linear run of n nodes based on the current default parent
210 212 - "." is a single node based on the current default parent
211 213 - "$" resets the default parent to null (implied at the start);
212 214 otherwise the default parent is always the last node created
213 215 - "<p" sets the default parent to the backref p
214 216 - "*p" is a fork at parent p, which is a backref
215 217 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
216 218 - "/p2" is a merge of the preceding node and p2
217 219 - ":tag" defines a local tag for the preceding node
218 220 - "@branch" sets the named branch for subsequent nodes
219 221 - "#...\\n" is a comment up to the end of the line
220 222
221 223 Whitespace between the above elements is ignored.
222 224
223 225 A backref is either
224 226
225 227 - a number n, which references the node curr-n, where curr is the current
226 228 node, or
227 229 - the name of a local tag you placed earlier using ":tag", or
228 230 - empty to denote the default parent.
229 231
230 232 All string-valued elements are either strictly alphanumeric, or must
231 233 be enclosed in double quotes ("..."), with "\\" as escape character.
232 234 """
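# Example (a sketch based on the elements documented above; not from the
# original source):
#
#   hg debugbuilddag '+3 :top $ +2 /top'
#
# would create three linear revisions, tag the last one "top", start a new
# root line of two revisions, and merge that line back into "top".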
233 235
234 236 if text is None:
235 237 ui.status(_(b"reading DAG from stdin\n"))
236 238 text = ui.fin.read()
237 239
238 240 cl = repo.changelog
239 241 if len(cl) > 0 and not from_existing:
240 242 raise error.Abort(_(b'repository is not empty'))
241 243
242 244 # determine number of revs in DAG
243 245 total = 0
244 246 for type, data in dagparser.parsedag(text):
245 247 if type == b'n':
246 248 total += 1
247 249
248 250 if mergeable_file:
249 251 linesperrev = 2
250 252 # make a file with k lines per rev
251 253 initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
252 254 initialmergedlines.append(b"")
253 255
254 256 tags = []
255 257 progress = ui.makeprogress(
256 258 _(b'building'), unit=_(b'revisions'), total=total
257 259 )
258 260 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
259 261 at = -1
260 262 atbranch = b'default'
261 263 nodeids = []
262 264 id = 0
263 265 progress.update(id)
264 266 for type, data in dagparser.parsedag(text):
265 267 if type == b'n':
266 268 ui.note((b'node %s\n' % pycompat.bytestr(data)))
267 269 id, ps = data
268 270
269 271 files = []
270 272 filecontent = {}
271 273
272 274 p2 = None
273 275 if mergeable_file:
274 276 fn = b"mf"
275 277 p1 = repo[ps[0]]
276 278 if len(ps) > 1:
277 279 p2 = repo[ps[1]]
278 280 pa = p1.ancestor(p2)
279 281 base, local, other = [
280 282 x[fn].data() for x in (pa, p1, p2)
281 283 ]
282 284 m3 = simplemerge.Merge3Text(base, local, other)
283 285 ml = [
284 286 l.strip()
285 287 for l in simplemerge.render_minimized(m3)[0]
286 288 ]
287 289 ml.append(b"")
288 290 elif at > 0:
289 291 ml = p1[fn].data().split(b"\n")
290 292 else:
291 293 ml = initialmergedlines
292 294 ml[id * linesperrev] += b" r%i" % id
293 295 mergedtext = b"\n".join(ml)
294 296 files.append(fn)
295 297 filecontent[fn] = mergedtext
296 298
297 299 if overwritten_file:
298 300 fn = b"of"
299 301 files.append(fn)
300 302 filecontent[fn] = b"r%i\n" % id
301 303
302 304 if new_file:
303 305 fn = b"nf%i" % id
304 306 files.append(fn)
305 307 filecontent[fn] = b"r%i\n" % id
306 308 if len(ps) > 1:
307 309 if not p2:
308 310 p2 = repo[ps[1]]
309 311 for fn in p2:
310 312 if fn.startswith(b"nf"):
311 313 files.append(fn)
312 314 filecontent[fn] = p2[fn].data()
313 315
314 316 def fctxfn(repo, cx, path):
315 317 if path in filecontent:
316 318 return context.memfilectx(
317 319 repo, cx, path, filecontent[path]
318 320 )
319 321 return None
320 322
321 323 if len(ps) == 0 or ps[0] < 0:
322 324 pars = [None, None]
323 325 elif len(ps) == 1:
324 326 pars = [nodeids[ps[0]], None]
325 327 else:
326 328 pars = [nodeids[p] for p in ps]
327 329 cx = context.memctx(
328 330 repo,
329 331 pars,
330 332 b"r%i" % id,
331 333 files,
332 334 fctxfn,
333 335 date=(id, 0),
334 336 user=b"debugbuilddag",
335 337 extra={b'branch': atbranch},
336 338 )
337 339 nodeid = repo.commitctx(cx)
338 340 nodeids.append(nodeid)
339 341 at = id
340 342 elif type == b'l':
341 343 id, name = data
342 344 ui.note((b'tag %s\n' % name))
343 345 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
344 346 elif type == b'a':
345 347 ui.note((b'branch %s\n' % data))
346 348 atbranch = data
347 349 progress.update(id)
348 350
349 351 if tags:
350 352 repo.vfs.write(b"localtags", b"".join(tags))
351 353
352 354
353 355 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
354 356 indent_string = b' ' * indent
355 357 if all:
356 358 ui.writenoi18n(
357 359 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
358 360 % indent_string
359 361 )
360 362
361 363 def showchunks(named):
362 364 ui.write(b"\n%s%s\n" % (indent_string, named))
363 365 for deltadata in gen.deltaiter():
364 366 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
365 367 ui.write(
366 368 b"%s%s %s %s %s %s %d\n"
367 369 % (
368 370 indent_string,
369 371 hex(node),
370 372 hex(p1),
371 373 hex(p2),
372 374 hex(cs),
373 375 hex(deltabase),
374 376 len(delta),
375 377 )
376 378 )
377 379
378 380 gen.changelogheader()
379 381 showchunks(b"changelog")
380 382 gen.manifestheader()
381 383 showchunks(b"manifest")
382 384 for chunkdata in iter(gen.filelogheader, {}):
383 385 fname = chunkdata[b'filename']
384 386 showchunks(fname)
385 387 else:
386 388 if isinstance(gen, bundle2.unbundle20):
387 389 raise error.Abort(_(b'use debugbundle2 for this file'))
388 390 gen.changelogheader()
389 391 for deltadata in gen.deltaiter():
390 392 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
391 393 ui.write(b"%s%s\n" % (indent_string, hex(node)))
392 394
393 395
394 396 def _debugobsmarkers(ui, part, indent=0, **opts):
395 397 """display version and markers contained in 'data'"""
396 398 opts = pycompat.byteskwargs(opts)
397 399 data = part.read()
398 400 indent_string = b' ' * indent
399 401 try:
400 402 version, markers = obsolete._readmarkers(data)
401 403 except error.UnknownVersion as exc:
402 404 msg = b"%sunsupported version: %s (%d bytes)\n"
403 405 msg %= indent_string, exc.version, len(data)
404 406 ui.write(msg)
405 407 else:
406 408 msg = b"%sversion: %d (%d bytes)\n"
407 409 msg %= indent_string, version, len(data)
408 410 ui.write(msg)
409 411 fm = ui.formatter(b'debugobsolete', opts)
410 412 for rawmarker in sorted(markers):
411 413 m = obsutil.marker(None, rawmarker)
412 414 fm.startitem()
413 415 fm.plain(indent_string)
414 416 cmdutil.showmarker(fm, m)
415 417 fm.end()
416 418
417 419
418 420 def _debugphaseheads(ui, data, indent=0):
419 421 """display phase heads contained in 'data'"""
420 422 indent_string = b' ' * indent
421 423 headsbyphase = phases.binarydecode(data)
422 424 for phase in phases.allphases:
423 425 for head in headsbyphase[phase]:
424 426 ui.write(indent_string)
425 427 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426 428
427 429
428 430 def _quasirepr(thing):
429 431 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
430 432 return b'{%s}' % (
431 433 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
432 434 )
433 435 return pycompat.bytestr(repr(thing))
434 436
435 437
436 438 def _debugbundle2(ui, gen, all=None, **opts):
437 439 """lists the contents of a bundle2"""
438 440 if not isinstance(gen, bundle2.unbundle20):
439 441 raise error.Abort(_(b'not a bundle2 file'))
440 442 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
441 443 parttypes = opts.get('part_type', [])
442 444 for part in gen.iterparts():
443 445 if parttypes and part.type not in parttypes:
444 446 continue
445 447 msg = b'%s -- %s (mandatory: %r)\n'
446 448 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
447 449 if part.type == b'changegroup':
448 450 version = part.params.get(b'version', b'01')
449 451 cg = changegroup.getunbundler(version, part, b'UN')
450 452 if not ui.quiet:
451 453 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
452 454 if part.type == b'obsmarkers':
453 455 if not ui.quiet:
454 456 _debugobsmarkers(ui, part, indent=4, **opts)
455 457 if part.type == b'phase-heads':
456 458 if not ui.quiet:
457 459 _debugphaseheads(ui, part, indent=4)
458 460
459 461
460 462 @command(
461 463 b'debugbundle',
462 464 [
463 465 (b'a', b'all', None, _(b'show all details')),
464 466 (b'', b'part-type', [], _(b'show only the named part type')),
465 467 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
466 468 ],
467 469 _(b'FILE'),
468 470 norepo=True,
469 471 )
470 472 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
471 473 """lists the contents of a bundle"""
472 474 with hg.openpath(ui, bundlepath) as f:
473 475 if spec:
474 476 spec = exchange.getbundlespec(ui, f)
475 477 ui.write(b'%s\n' % spec)
476 478 return
477 479
478 480 gen = exchange.readbundle(ui, f, bundlepath)
479 481 if isinstance(gen, bundle2.unbundle20):
480 482 return _debugbundle2(ui, gen, all=all, **opts)
481 483 _debugchangegroup(ui, gen, all=all, **opts)
482 484
483 485
484 486 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
485 487 def debugcapabilities(ui, path, **opts):
486 488 """lists the capabilities of a remote peer"""
487 489 opts = pycompat.byteskwargs(opts)
488 490 peer = hg.peer(ui, opts, path)
489 491 try:
490 492 caps = peer.capabilities()
491 493 ui.writenoi18n(b'Main capabilities:\n')
492 494 for c in sorted(caps):
493 495 ui.write(b' %s\n' % c)
494 496 b2caps = bundle2.bundle2caps(peer)
495 497 if b2caps:
496 498 ui.writenoi18n(b'Bundle2 capabilities:\n')
497 499 for key, values in sorted(b2caps.items()):
498 500 ui.write(b' %s\n' % key)
499 501 for v in values:
500 502 ui.write(b' %s\n' % v)
501 503 finally:
502 504 peer.close()
503 505
504 506
505 507 @command(
506 508 b'debugchangedfiles',
507 509 [
508 510 (
509 511 b'',
510 512 b'compute',
511 513 False,
512 514 b"compute information instead of reading it from storage",
513 515 ),
514 516 ],
515 517 b'REV',
516 518 )
517 519 def debugchangedfiles(ui, repo, rev, **opts):
518 520 """list the stored file changes for a revision"""
519 521 ctx = logcmdutil.revsingle(repo, rev, None)
520 522 files = None
521 523
522 524 if opts['compute']:
523 525 files = metadata.compute_all_files_changes(ctx)
524 526 else:
525 527 sd = repo.changelog.sidedata(ctx.rev())
526 528 files_block = sd.get(sidedata.SD_FILES)
527 529 if files_block is not None:
528 530 files = metadata.decode_files_sidedata(sd)
529 531 if files is not None:
530 532 for f in sorted(files.touched):
531 533 if f in files.added:
532 534 action = b"added"
533 535 elif f in files.removed:
534 536 action = b"removed"
535 537 elif f in files.merged:
536 538 action = b"merged"
537 539 elif f in files.salvaged:
538 540 action = b"salvaged"
539 541 else:
540 542 action = b"touched"
541 543
542 544 copy_parent = b""
543 545 copy_source = b""
544 546 if f in files.copied_from_p1:
545 547 copy_parent = b"p1"
546 548 copy_source = files.copied_from_p1[f]
547 549 elif f in files.copied_from_p2:
548 550 copy_parent = b"p2"
549 551 copy_source = files.copied_from_p2[f]
550 552
551 553 data = (action, copy_parent, f, copy_source)
552 554 template = b"%-8s %2s: %s, %s;\n"
553 555 ui.write(template % data)
554 556
555 557
556 558 @command(b'debugcheckstate', [], b'')
557 559 def debugcheckstate(ui, repo):
558 560 """validate the correctness of the current dirstate"""
559 561 errors = verify.verifier(repo)._verify_dirstate()
560 562 if errors:
561 563 errstr = _(b"dirstate inconsistent with current parent's manifest")
562 564 raise error.Abort(errstr)
563 565
564 566
565 567 @command(
566 568 b'debugcolor',
567 569 [(b'', b'style', None, _(b'show all configured styles'))],
568 570 b'hg debugcolor',
569 571 )
570 572 def debugcolor(ui, repo, **opts):
571 573 """show available color, effects or style"""
572 574 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
573 575 if opts.get('style'):
574 576 return _debugdisplaystyle(ui)
575 577 else:
576 578 return _debugdisplaycolor(ui)
577 579
578 580
579 581 def _debugdisplaycolor(ui):
580 582 ui = ui.copy()
581 583 ui._styles.clear()
582 584 for effect in color._activeeffects(ui).keys():
583 585 ui._styles[effect] = effect
584 586 if ui._terminfoparams:
585 587 for k, v in ui.configitems(b'color'):
586 588 if k.startswith(b'color.'):
587 589 ui._styles[k] = k[6:]
588 590 elif k.startswith(b'terminfo.'):
589 591 ui._styles[k] = k[9:]
590 592 ui.write(_(b'available colors:\n'))
591 593 # sort labels containing '_' after the others, to group the '_background' entries.
592 594 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
593 595 for colorname, label in items:
594 596 ui.write(b'%s\n' % colorname, label=label)
595 597
596 598
597 599 def _debugdisplaystyle(ui):
598 600 ui.write(_(b'available style:\n'))
599 601 if not ui._styles:
600 602 return
601 603 width = max(len(s) for s in ui._styles)
602 604 for label, effects in sorted(ui._styles.items()):
603 605 ui.write(b'%s' % label, label=label)
604 606 if effects:
605 607 # 50
606 608 ui.write(b': ')
607 609 ui.write(b' ' * (max(0, width - len(label))))
608 610 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
609 611 ui.write(b'\n')
610 612
611 613
612 614 @command(b'debugcreatestreamclonebundle', [], b'FILE')
613 615 def debugcreatestreamclonebundle(ui, repo, fname):
614 616 """create a stream clone bundle file
615 617
616 618 Stream bundles are special bundles that are essentially archives of
617 619 revlog files. They are commonly used for cloning very quickly.
618 620 """
619 621 # TODO we may want to turn this into an abort when this functionality
620 622 # is moved into `hg bundle`.
621 623 if phases.hassecret(repo):
622 624 ui.warn(
623 625 _(
624 626 b'(warning: stream clone bundle will contain secret '
625 627 b'revisions)\n'
626 628 )
627 629 )
628 630
629 631 requirements, gen = streamclone.generatebundlev1(repo)
630 632 changegroup.writechunks(ui, gen, fname)
631 633
632 634 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
633 635
634 636
635 637 @command(
636 638 b'debugdag',
637 639 [
638 640 (b't', b'tags', None, _(b'use tags as labels')),
639 641 (b'b', b'branches', None, _(b'annotate with branch names')),
640 642 (b'', b'dots', None, _(b'use dots for runs')),
641 643 (b's', b'spaces', None, _(b'separate elements by spaces')),
642 644 ],
643 645 _(b'[OPTION]... [FILE [REV]...]'),
644 646 optionalrepo=True,
645 647 )
646 648 def debugdag(ui, repo, file_=None, *revs, **opts):
647 649 """format the changelog or an index DAG as a concise textual description
648 650
649 651 If you pass a revlog index, the revlog's DAG is emitted. If you list
650 652 revision numbers, they get labeled in the output as rN.
651 653
652 654 Otherwise, the changelog DAG of the current repo is emitted.
653 655 """
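# Example (a sketch based on the options above; not from the original source):
#
#   hg debugdag -t -b
#
# emits the changelog DAG of the current repository annotated with tag labels
# and branch names, in the concise dagparser text format (the same format
# consumed by "hg debugbuilddag").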
654 656 spaces = opts.get('spaces')
655 657 dots = opts.get('dots')
656 658 if file_:
657 659 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
658 660 revs = {int(r) for r in revs}
659 661
660 662 def events():
661 663 for r in rlog:
662 664 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
663 665 if r in revs:
664 666 yield b'l', (r, b"r%i" % r)
665 667
666 668 elif repo:
667 669 cl = repo.changelog
668 670 tags = opts.get('tags')
669 671 branches = opts.get('branches')
670 672 if tags:
671 673 labels = {}
672 674 for l, n in repo.tags().items():
673 675 labels.setdefault(cl.rev(n), []).append(l)
674 676
675 677 def events():
676 678 b = b"default"
677 679 for r in cl:
678 680 if branches:
679 681 newb = cl.read(cl.node(r))[5][b'branch']
680 682 if newb != b:
681 683 yield b'a', newb
682 684 b = newb
683 685 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
684 686 if tags:
685 687 ls = labels.get(r)
686 688 if ls:
687 689 for l in ls:
688 690 yield b'l', (r, l)
689 691
690 692 else:
691 693 raise error.Abort(_(b'need repo for changelog dag'))
692 694
693 695 for line in dagparser.dagtextlines(
694 696 events(),
695 697 addspaces=spaces,
696 698 wraplabels=True,
697 699 wrapannotations=True,
698 700 wrapnonlinear=dots,
699 701 usedots=dots,
700 702 maxlinewidth=70,
701 703 ):
702 704 ui.write(line)
703 705 ui.write(b"\n")
704 706
705 707
706 708 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
707 709 def debugdata(ui, repo, file_, rev=None, **opts):
708 710 """dump the contents of a data file revision"""
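# Example (a sketch; not from the original source):
#
#   hg debugdata -c 0
#
# dumps the raw changelog entry for revision 0, while "hg debugdata FILE REV"
# dumps the given revision of a file's revlog.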
709 711 opts = pycompat.byteskwargs(opts)
710 712 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
711 713 if rev is not None:
712 714 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
713 715 file_, rev = None, file_
714 716 elif rev is None:
715 717 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
716 718 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
717 719 try:
718 720 ui.write(r.rawdata(r.lookup(rev)))
719 721 except KeyError:
720 722 raise error.Abort(_(b'invalid revision identifier %s') % rev)
721 723
722 724
723 725 @command(
724 726 b'debugdate',
725 727 [(b'e', b'extended', None, _(b'try extended date formats'))],
726 728 _(b'[-e] DATE [RANGE]'),
727 729 norepo=True,
728 730 optionalrepo=True,
729 731 )
730 732 def debugdate(ui, date, range=None, **opts):
731 733 """parse and display a date"""
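# Example (a sketch; not from the original source):
#
#   hg debugdate '2006-02-01 13:00:30 -0500'
#
# prints the parsed date as Mercurial's internal "<unix timestamp> <offset>"
# pair followed by the standard date representation.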
732 734 if opts["extended"]:
733 735 d = dateutil.parsedate(date, dateutil.extendeddateformats)
734 736 else:
735 737 d = dateutil.parsedate(date)
736 738 ui.writenoi18n(b"internal: %d %d\n" % d)
737 739 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
738 740 if range:
739 741 m = dateutil.matchdate(range)
740 742 ui.writenoi18n(b"match: %s\n" % m(d[0]))
741 743
742 744
743 745 @command(
744 746 b'debugdeltachain',
745 747 cmdutil.debugrevlogopts + cmdutil.formatteropts,
746 748 _(b'-c|-m|FILE'),
747 749 optionalrepo=True,
748 750 )
749 751 def debugdeltachain(ui, repo, file_=None, **opts):
750 752 """dump information about delta chains in a revlog
751 753
752 754 Output can be templatized. Available template keywords are:
753 755
754 756 :``rev``: revision number
755 757 :``p1``: parent 1 revision number (for reference)
756 758 :``p2``: parent 2 revision number (for reference)
757 759 :``chainid``: delta chain identifier (numbered by unique base)
758 760 :``chainlen``: delta chain length to this revision
759 761 :``prevrev``: previous revision in delta chain
760 762 :``deltatype``: role of delta / how it was computed
761 763 - base: a full snapshot
762 764 - snap: an intermediate snapshot
763 765 - p1: a delta against the first parent
764 766 - p2: a delta against the second parent
765 767 - skip1: a delta against the same base as p1
766 768 (when p1 has an empty delta)
767 769 - skip2: a delta against the same base as p2
768 770 (when p2 has an empty delta)
769 771 - prev: a delta against the previous revision
770 772 - other: a delta against an arbitrary revision
771 773 :``compsize``: compressed size of revision
772 774 :``uncompsize``: uncompressed size of revision
773 775 :``chainsize``: total size of compressed revisions in chain
774 776 :``chainratio``: total chain size divided by uncompressed revision size
775 777 (new delta chains typically start at ratio 2.00)
776 778 :``lindist``: linear distance from base revision in delta chain to end
777 779 of this revision
778 780 :``extradist``: total size of revisions not part of this delta chain from
779 781 base of delta chain to end of this revision; a measurement
780 782 of how much extra data we need to read/seek across to read
781 783 the delta chain for this revision
782 784 :``extraratio``: extradist divided by chainsize; another representation of
783 785 how much unrelated data is needed to load this delta chain
784 786
785 787 If the repository is configured to use the sparse read, additional keywords
786 788 are available:
787 789
788 790 :``readsize``: total size of data read from the disk for a revision
789 791 (sum of the sizes of all the blocks)
790 792 :``largestblock``: size of the largest block of data read from the disk
791 793 :``readdensity``: density of useful bytes in the data read from the disk
792 794 :``srchunks``: in how many data hunks the whole revision would be read
793 795
794 796 The sparse read can be enabled with experimental.sparse-read = True
795 797 """
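# Example (a sketch using the keywords above; not from the original source):
#
#   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
#
# lists, for every manifest revision, its delta chain identifier, chain length
# and delta type.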
796 798 opts = pycompat.byteskwargs(opts)
797 799 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
798 800 index = r.index
799 801 start = r.start
800 802 length = r.length
801 803 generaldelta = r._generaldelta
802 804 withsparseread = getattr(r, '_withsparseread', False)
803 805
804 806 # security to avoid crash on corrupted revlogs
805 807 total_revs = len(index)
806 808
807 809 chain_size_cache = {}
808 810
809 811 def revinfo(rev):
810 812 e = index[rev]
811 813 compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
812 814 uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
813 815
814 816 base = e[revlog_constants.ENTRY_DELTA_BASE]
815 817 p1 = e[revlog_constants.ENTRY_PARENT_1]
816 818 p2 = e[revlog_constants.ENTRY_PARENT_2]
817 819
818 820 # If a parent of a revision has an empty delta, we never try to delta
819 821 # against that parent, but directly against the delta base of that
820 822 # parent (recursively). It avoids adding a useless entry in the chain.
821 823 #
822 824 # However we need to detect that as a special case for delta-type, that
823 825 # is not simply "other".
824 826 p1_base = p1
825 827 if p1 != nullrev and p1 < total_revs:
826 828 e1 = index[p1]
827 829 while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
828 830 new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
829 831 if (
830 832 new_base == p1_base
831 833 or new_base == nullrev
832 834 or new_base >= total_revs
833 835 ):
834 836 break
835 837 p1_base = new_base
836 838 e1 = index[p1_base]
837 839 p2_base = p2
838 840 if p2 != nullrev and p2 < total_revs:
839 841 e2 = index[p2]
840 842 while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
841 843 new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
842 844 if (
843 845 new_base == p2_base
844 846 or new_base == nullrev
845 847 or new_base >= total_revs
846 848 ):
847 849 break
848 850 p2_base = new_base
849 851 e2 = index[p2_base]
850 852
851 853 if generaldelta:
852 854 if base == p1:
853 855 deltatype = b'p1'
854 856 elif base == p2:
855 857 deltatype = b'p2'
856 858 elif base == rev:
857 859 deltatype = b'base'
858 860 elif base == p1_base:
859 861 deltatype = b'skip1'
860 862 elif base == p2_base:
861 863 deltatype = b'skip2'
862 864 elif r.issnapshot(rev):
863 865 deltatype = b'snap'
864 866 elif base == rev - 1:
865 867 deltatype = b'prev'
866 868 else:
867 869 deltatype = b'other'
868 870 else:
869 871 if base == rev:
870 872 deltatype = b'base'
871 873 else:
872 874 deltatype = b'prev'
873 875
874 876 chain = r._deltachain(rev)[0]
875 877 chain_size = 0
876 878 for iter_rev in reversed(chain):
877 879 cached = chain_size_cache.get(iter_rev)
878 880 if cached is not None:
879 881 chain_size += cached
880 882 break
881 883 e = index[iter_rev]
882 884 chain_size += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
883 885 chain_size_cache[rev] = chain_size
884 886
885 887 return p1, p2, compsize, uncompsize, deltatype, chain, chain_size
886 888
887 889 fm = ui.formatter(b'debugdeltachain', opts)
888 890
889 891 fm.plain(
890 892 b' rev p1 p2 chain# chainlen prev delta '
891 893 b'size rawsize chainsize ratio lindist extradist '
892 894 b'extraratio'
893 895 )
894 896 if withsparseread:
895 897 fm.plain(b' readsize largestblk rddensity srchunks')
896 898 fm.plain(b'\n')
897 899
898 900 chainbases = {}
899 901 for rev in r:
900 902 p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
901 903 chainbase = chain[0]
902 904 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
903 905 basestart = start(chainbase)
904 906 revstart = start(rev)
905 907 lineardist = revstart + comp - basestart
906 908 extradist = lineardist - chainsize
907 909 try:
908 910 prevrev = chain[-2]
909 911 except IndexError:
910 912 prevrev = -1
911 913
912 914 if uncomp != 0:
913 915 chainratio = float(chainsize) / float(uncomp)
914 916 else:
915 917 chainratio = chainsize
916 918
917 919 if chainsize != 0:
918 920 extraratio = float(extradist) / float(chainsize)
919 921 else:
920 922 extraratio = extradist
921 923
922 924 fm.startitem()
923 925 fm.write(
924 926 b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
925 927 b'uncompsize chainsize chainratio lindist extradist '
926 928 b'extraratio',
927 929 b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
928 930 rev,
929 931 p1,
930 932 p2,
931 933 chainid,
932 934 len(chain),
933 935 prevrev,
934 936 deltatype,
935 937 comp,
936 938 uncomp,
937 939 chainsize,
938 940 chainratio,
939 941 lineardist,
940 942 extradist,
941 943 extraratio,
942 944 rev=rev,
943 945 chainid=chainid,
944 946 chainlen=len(chain),
945 947 prevrev=prevrev,
946 948 deltatype=deltatype,
947 949 compsize=comp,
948 950 uncompsize=uncomp,
949 951 chainsize=chainsize,
950 952 chainratio=chainratio,
951 953 lindist=lineardist,
952 954 extradist=extradist,
953 955 extraratio=extraratio,
954 956 )
955 957 if withsparseread:
956 958 readsize = 0
957 959 largestblock = 0
958 960 srchunks = 0
959 961
960 962 for revschunk in deltautil.slicechunk(r, chain):
961 963 srchunks += 1
962 964 blkend = start(revschunk[-1]) + length(revschunk[-1])
963 965 blksize = blkend - start(revschunk[0])
964 966
965 967 readsize += blksize
966 968 if largestblock < blksize:
967 969 largestblock = blksize
968 970
969 971 if readsize:
970 972 readdensity = float(chainsize) / float(readsize)
971 973 else:
972 974 readdensity = 1
973 975
974 976 fm.write(
975 977 b'readsize largestblock readdensity srchunks',
976 978 b' %10d %10d %9.5f %8d',
977 979 readsize,
978 980 largestblock,
979 981 readdensity,
980 982 srchunks,
981 983 readsize=readsize,
982 984 largestblock=largestblock,
983 985 readdensity=readdensity,
984 986 srchunks=srchunks,
985 987 )
986 988
987 989 fm.plain(b'\n')
988 990
989 991 fm.end()
990 992
991 993
992 994 @command(
993 995 b'debug-delta-find',
994 996 cmdutil.debugrevlogopts
995 997 + cmdutil.formatteropts
996 998 + [
997 999 (
998 1000 b'',
999 1001 b'source',
1000 1002 b'full',
1001 1003 _(b'input data feed to the process (full, storage, p1, p2, prev)'),
1002 1004 ),
1003 1005 ],
1004 1006 _(b'-c|-m|FILE REV'),
1005 1007 optionalrepo=True,
1006 1008 )
1007 1009 def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
1008 1010 """display the computation to get to a valid delta for storing REV
1009 1011
1010 1012 This command will replay the process used to find the "best" delta to store
1011 1013 a revision and display information about all the steps used to get to that
1012 1014 result.
1013 1015
1014 1016 By default, the process is fed with the full text of the revision. This
1015 1017 can be controlled with the --source flag.
1016 1018
1017 1019 The revision is identified by the revision number of the target storage
1018 1020 (not the changelog revision number).
1019 1021
1020 1022 note: the process is initiated from a full text of the revision to store.
1021 1023 """
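# Example (a sketch; not from the original source):
#
#   hg debug-delta-find -m 5 --source p1
#
# replays the delta search for manifest revision 5, using the first parent as
# the base of the input data instead of the full text.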
1022 1024 opts = pycompat.byteskwargs(opts)
1023 1025 if arg_2 is None:
1024 1026 file_ = None
1025 1027 rev = arg_1
1026 1028 else:
1027 1029 file_ = arg_1
1028 1030 rev = arg_2
1029 1031
1030 1032 rev = int(rev)
1031 1033
1032 1034 revlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
1033 1035 p1r, p2r = revlog.parentrevs(rev)
1034 1036
1035 1037 if source == b'full':
1036 1038 base_rev = nullrev
1037 1039 elif source == b'storage':
1038 1040 base_rev = revlog.deltaparent(rev)
1039 1041 elif source == b'p1':
1040 1042 base_rev = p1r
1041 1043 elif source == b'p2':
1042 1044 base_rev = p2r
1043 1045 elif source == b'prev':
1044 1046 base_rev = rev - 1
1045 1047 else:
1046 1048 raise error.InputError(b"invalid --source value: %s" % source)
1047 1049
1048 1050 revlog_debug.debug_delta_find(ui, revlog, rev, base_rev=base_rev)
1049 1051
1050 1052
1051 1053 @command(
1052 1054 b'debugdirstate|debugstate',
1053 1055 [
1054 1056 (
1055 1057 b'',
1056 1058 b'nodates',
1057 1059 None,
1058 1060 _(b'do not display the saved mtime (DEPRECATED)'),
1059 1061 ),
1060 1062 (b'', b'dates', True, _(b'display the saved mtime')),
1061 1063 (b'', b'datesort', None, _(b'sort by saved mtime')),
1062 1064 (
1063 1065 b'',
1064 1066 b'docket',
1065 1067 False,
1066 1068 _(b'display the docket (metadata file) instead'),
1067 1069 ),
1068 1070 (
1069 1071 b'',
1070 1072 b'all',
1071 1073 False,
1072 1074 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
1073 1075 ),
1074 1076 ],
1075 1077 _(b'[OPTION]...'),
1076 1078 )
1077 1079 def debugstate(ui, repo, **opts):
1078 1080 """show the contents of the current dirstate"""
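# Example (a sketch; not from the original source):
#
#   hg debugdirstate --docket
#
# prints the dirstate-v2 docket metadata (data size, uuid, tree layout
# counters, ignore pattern hash) instead of the individual dirstate entries.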
1079 1081
1080 1082 if opts.get("docket"):
1081 1083 if not repo.dirstate._use_dirstate_v2:
1082 1084 raise error.Abort(_(b'dirstate v1 does not have a docket'))
1083 1085
1084 1086 docket = repo.dirstate._map.docket
1085 1087 (
1086 1088 start_offset,
1087 1089 root_nodes,
1088 1090 nodes_with_entry,
1089 1091 nodes_with_copy,
1090 1092 unused_bytes,
1091 1093 _unused,
1092 1094 ignore_pattern,
1093 1095 ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)
1094 1096
1095 1097 ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
1096 1098 ui.write(_(b"data file uuid: %s\n") % docket.uuid)
1097 1099 ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
1098 1100 ui.write(_(b"number of root nodes: %d\n") % root_nodes)
1099 1101 ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
1100 1102 ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
1101 1103 ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
1102 1104 ui.write(
1103 1105 _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
1104 1106 )
1105 1107 return
1106 1108
1107 1109 nodates = not opts['dates']
1108 1110 if opts.get('nodates') is not None:
1109 1111 nodates = True
1110 1112 datesort = opts.get('datesort')
1111 1113
1112 1114 if datesort:
1113 1115
1114 1116 def keyfunc(entry):
1115 1117 filename, _state, _mode, _size, mtime = entry
1116 1118 return (mtime, filename)
1117 1119
1118 1120 else:
1119 1121 keyfunc = None # sort by filename
1120 1122 entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
1121 1123 entries.sort(key=keyfunc)
1122 1124 for entry in entries:
1123 1125 filename, state, mode, size, mtime = entry
1124 1126 if mtime == -1:
1125 1127 timestr = b'unset '
1126 1128 elif nodates:
1127 1129 timestr = b'set '
1128 1130 else:
1129 1131 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
1130 1132 timestr = encoding.strtolocal(timestr)
1131 1133 if mode & 0o20000:
1132 1134 mode = b'lnk'
1133 1135 else:
1134 1136 mode = b'%3o' % (mode & 0o777 & ~util.umask)
1135 1137 ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
1136 1138 for f in repo.dirstate.copies():
1137 1139 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1138 1140
1139 1141
1140 1142 @command(
1141 1143 b'debugdirstateignorepatternshash',
1142 1144 [],
1143 1145 _(b''),
1144 1146 )
1145 1147 def debugdirstateignorepatternshash(ui, repo, **opts):
1146 1148 """show the hash of ignore patterns stored in dirstate if v2,
1147 1149 or nothing for dirstate-v1
1148 1150 """
1149 1151 if repo.dirstate._use_dirstate_v2:
1150 1152 docket = repo.dirstate._map.docket
1151 1153 hash_len = 20 # 160 bits for SHA-1
1152 1154 hash_bytes = docket.tree_metadata[-hash_len:]
1153 1155 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1154 1156
1155 1157
1156 1158 @command(
1157 1159 b'debugdiscovery',
1158 1160 [
1159 1161 (b'', b'old', None, _(b'use old-style discovery')),
1160 1162 (
1161 1163 b'',
1162 1164 b'nonheads',
1163 1165 None,
1164 1166 _(b'use old-style discovery with non-heads included'),
1165 1167 ),
1166 1168 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1167 1169 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1168 1170 (
1169 1171 b'',
1170 1172 b'local-as-revs',
1171 1173 b"",
1172 1174 b'treat local as having these revisions only',
1173 1175 ),
1174 1176 (
1175 1177 b'',
1176 1178 b'remote-as-revs',
1177 1179 b"",
1178 1180 b'use local as remote, with only these revisions',
1179 1181 ),
1180 1182 ]
1181 1183 + cmdutil.remoteopts
1182 1184 + cmdutil.formatteropts,
1183 1185 _(b'[--rev REV] [OTHER]'),
1184 1186 )
1185 1187 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1186 1188 """runs the changeset discovery protocol in isolation
1187 1189
1188 1190 The local peer can be "replaced" by a subset of the local repository by
1189 1191 using the `--local-as-revs` flag. In the same way, the usual `remote` peer
1190 1192 can be "replaced" by a subset of the local repository using the
1191 1193 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1192 1194 discovery situations.
1193 1195
1194 1196 The following developer-oriented config options are relevant for people playing with this command:
1195 1197
1196 1198 * devel.discovery.exchange-heads=True
1197 1199
1198 1200 If False, the discovery will not start with
1199 1201 remote head fetching and local head querying.
1200 1202
1201 1203 * devel.discovery.grow-sample=True
1202 1204
1203 1205 If False, the sample size used in set discovery will not be increased
1204 1206 through the process
1205 1207
1206 1208 * devel.discovery.grow-sample.dynamic=True
1207 1209
1208 1210 When discovery.grow-sample.dynamic is True, the default, the sample size is
1209 1211 adapted to the shape of the undecided set (it is set to the max of:
1210 1212 <target-size>, len(roots(undecided)), len(heads(undecided)))
1211 1213
1212 1214 * devel.discovery.grow-sample.rate=1.05
1213 1215
1214 1216 the rate at which the sample grows
1215 1217
1216 1218 * devel.discovery.randomize=True
1217 1219
1218 1220 If False, random sampling during discovery is deterministic. It is meant for
1219 1221 integration tests.
1220 1222
1221 1223 * devel.discovery.sample-size=200
1222 1224
1223 1225 Control the initial size of the discovery sample
1224 1226
1225 1227 * devel.discovery.sample-size.initial=100
1226 1228
1227 1229 Control the size of the discovery sample used for the initial round
1228 1230 """
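# Example (a sketch; not from the original source):
#
#   hg debugdiscovery --local-as-revs '0:100' --seed 42
#
# runs discovery against the default remote while pretending the local
# repository only contains revisions 0 to 100, with a fixed seed so the
# sampling is reproducible.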
1229 1231 opts = pycompat.byteskwargs(opts)
1230 1232 unfi = repo.unfiltered()
1231 1233
1232 1234 # setup potential extra filtering
1233 1235 local_revs = opts[b"local_as_revs"]
1234 1236 remote_revs = opts[b"remote_as_revs"]
1235 1237
1236 1238 # make sure tests are repeatable
1237 1239 random.seed(int(opts[b'seed']))
1238 1240
1239 1241 if not remote_revs:
1240 1242 path = urlutil.get_unique_pull_path_obj(
1241 1243 b'debugdiscovery', ui, remoteurl
1242 1244 )
1243 1245 branches = (path.branch, [])
1244 1246 remote = hg.peer(repo, opts, path)
1245 1247 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
1246 1248 else:
1247 1249 branches = (None, [])
1248 1250 remote_filtered_revs = logcmdutil.revrange(
1249 1251 unfi, [b"not (::(%s))" % remote_revs]
1250 1252 )
1251 1253 remote_filtered_revs = frozenset(remote_filtered_revs)
1252 1254
1253 1255 def remote_func(x):
1254 1256 return remote_filtered_revs
1255 1257
1256 1258 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1257 1259
1258 1260 remote = repo.peer()
1259 1261 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1260 1262
1261 1263 if local_revs:
1262 1264 local_filtered_revs = logcmdutil.revrange(
1263 1265 unfi, [b"not (::(%s))" % local_revs]
1264 1266 )
1265 1267 local_filtered_revs = frozenset(local_filtered_revs)
1266 1268
1267 1269 def local_func(x):
1268 1270 return local_filtered_revs
1269 1271
1270 1272 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1271 1273 repo = repo.filtered(b'debug-discovery-local-filter')
1272 1274
1273 1275 data = {}
1274 1276 if opts.get(b'old'):
1275 1277
1276 1278 def doit(pushedrevs, remoteheads, remote=remote):
1277 1279 if not util.safehasattr(remote, b'branches'):
1278 1280 # enable in-client legacy support
1279 1281 remote = localrepo.locallegacypeer(remote.local())
1280 1282 if remote_revs:
1281 1283 r = remote._repo.filtered(b'debug-discovery-remote-filter')
1282 1284 remote._repo = r
1283 1285 common, _in, hds = treediscovery.findcommonincoming(
1284 1286 repo, remote, force=True, audit=data
1285 1287 )
1286 1288 common = set(common)
1287 1289 if not opts.get(b'nonheads'):
1288 1290 ui.writenoi18n(
1289 1291 b"unpruned common: %s\n"
1290 1292 % b" ".join(sorted(short(n) for n in common))
1291 1293 )
1292 1294
1293 1295 clnode = repo.changelog.node
1294 1296 common = repo.revs(b'heads(::%ln)', common)
1295 1297 common = {clnode(r) for r in common}
1296 1298 return common, hds
1297 1299
1298 1300 else:
1299 1301
1300 1302 def doit(pushedrevs, remoteheads, remote=remote):
1301 1303 nodes = None
1302 1304 if pushedrevs:
1303 1305 revs = logcmdutil.revrange(repo, pushedrevs)
1304 1306 nodes = [repo[r].node() for r in revs]
1305 1307 common, any, hds = setdiscovery.findcommonheads(
1306 1308 ui,
1307 1309 repo,
1308 1310 remote,
1309 1311 ancestorsof=nodes,
1310 1312 audit=data,
1311 1313 abortwhenunrelated=False,
1312 1314 )
1313 1315 return common, hds
1314 1316
1315 1317 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1316 1318 localrevs = opts[b'rev']
1317 1319
1318 1320 fm = ui.formatter(b'debugdiscovery', opts)
1319 1321 if fm.strict_format:
1320 1322
1321 1323 @contextlib.contextmanager
1322 1324 def may_capture_output():
1323 1325 ui.pushbuffer()
1324 1326 yield
1325 1327 data[b'output'] = ui.popbuffer()
1326 1328
1327 1329 else:
1328 1330 may_capture_output = util.nullcontextmanager
1329 1331 with may_capture_output():
1330 1332 with util.timedcm('debug-discovery') as t:
1331 1333 common, hds = doit(localrevs, remoterevs)
1332 1334
1333 1335 # compute all statistics
1334 1336 if len(common) == 1 and repo.nullid in common:
1335 1337 common = set()
1336 1338 heads_common = set(common)
1337 1339 heads_remote = set(hds)
1338 1340 heads_local = set(repo.heads())
1339 1341 # note: there cannot be a local or remote head that is in common and not
1340 1342 # itself a head of common.
1341 1343 heads_common_local = heads_common & heads_local
1342 1344 heads_common_remote = heads_common & heads_remote
1343 1345 heads_common_both = heads_common & heads_remote & heads_local
1344 1346
1345 1347 all = repo.revs(b'all()')
1346 1348 common = repo.revs(b'::%ln', common)
1347 1349 roots_common = repo.revs(b'roots(::%ld)', common)
1348 1350 missing = repo.revs(b'not ::%ld', common)
1349 1351 heads_missing = repo.revs(b'heads(%ld)', missing)
1350 1352 roots_missing = repo.revs(b'roots(%ld)', missing)
1351 1353 assert len(common) + len(missing) == len(all)
1352 1354
1353 1355 initial_undecided = repo.revs(
1354 1356 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1355 1357 )
1356 1358 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1357 1359 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1358 1360 common_initial_undecided = initial_undecided & common
1359 1361 missing_initial_undecided = initial_undecided & missing
1360 1362
1361 1363 data[b'elapsed'] = t.elapsed
1362 1364 data[b'nb-common-heads'] = len(heads_common)
1363 1365 data[b'nb-common-heads-local'] = len(heads_common_local)
1364 1366 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1365 1367 data[b'nb-common-heads-both'] = len(heads_common_both)
1366 1368 data[b'nb-common-roots'] = len(roots_common)
1367 1369 data[b'nb-head-local'] = len(heads_local)
1368 1370 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1369 1371 data[b'nb-head-remote'] = len(heads_remote)
1370 1372 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1371 1373 heads_common_remote
1372 1374 )
1373 1375 data[b'nb-revs'] = len(all)
1374 1376 data[b'nb-revs-common'] = len(common)
1375 1377 data[b'nb-revs-missing'] = len(missing)
1376 1378 data[b'nb-missing-heads'] = len(heads_missing)
1377 1379 data[b'nb-missing-roots'] = len(roots_missing)
1378 1380 data[b'nb-ini_und'] = len(initial_undecided)
1379 1381 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1380 1382 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1381 1383 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1382 1384 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1383 1385
1384 1386 fm.startitem()
1385 1387 fm.data(**pycompat.strkwargs(data))
1386 1388 # display discovery summary
1387 1389 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1388 1390 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1389 1391 if b'total-round-trips-heads' in data:
1390 1392 fm.plain(
1391 1393 b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
1392 1394 )
1393 1395 if b'total-round-trips-branches' in data:
1394 1396 fm.plain(
1395 1397 b" round-trips-branches: %(total-round-trips-branches)9d\n"
1396 1398 % data
1397 1399 )
1398 1400 if b'total-round-trips-between' in data:
1399 1401 fm.plain(
1400 1402 b" round-trips-between: %(total-round-trips-between)9d\n" % data
1401 1403 )
1402 1404 fm.plain(b"queries: %(total-queries)9d\n" % data)
1403 1405 if b'total-queries-branches' in data:
1404 1406 fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
1405 1407 if b'total-queries-between' in data:
1406 1408 fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
1407 1409 fm.plain(b"heads summary:\n")
1408 1410 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1409 1411 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1410 1412 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1411 1413 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1412 1414 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1413 1415 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1414 1416 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1415 1417 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1416 1418 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1417 1419 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1418 1420 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1419 1421 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1420 1422 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1421 1423 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1422 1424 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1423 1425 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1424 1426 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1425 1427 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1426 1428 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1427 1429 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1428 1430 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1429 1431 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1430 1432
1431 1433 if ui.verbose:
1432 1434 fm.plain(
1433 1435 b"common heads: %s\n"
1434 1436 % b" ".join(sorted(short(n) for n in heads_common))
1435 1437 )
1436 1438 fm.end()
1437 1439
1438 1440
1439 1441 _chunksize = 4 << 10
1440 1442
1441 1443
1442 1444 @command(
1443 1445 b'debugdownload',
1444 1446 [
1445 1447 (b'o', b'output', b'', _(b'path')),
1446 1448 ],
1447 1449 optionalrepo=True,
1448 1450 )
1449 1451 def debugdownload(ui, repo, url, output=None, **opts):
1450 1452 """download a resource using Mercurial logic and config"""
1451 1453 fh = urlmod.open(ui, url, output)
1452 1454
1453 1455 dest = ui
1454 1456 if output:
1455 1457 dest = open(output, b"wb", _chunksize)
1456 1458 try:
1457 1459 data = fh.read(_chunksize)
1458 1460 while data:
1459 1461 dest.write(data)
1460 1462 data = fh.read(_chunksize)
1461 1463 finally:
1462 1464 if output:
1463 1465 dest.close()
1464 1466
1465 1467
1466 1468 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1467 1469 def debugextensions(ui, repo, **opts):
1468 1470 '''show information about active extensions'''
1469 1471 opts = pycompat.byteskwargs(opts)
1470 1472 exts = extensions.extensions(ui)
1471 1473 hgver = util.version()
1472 1474 fm = ui.formatter(b'debugextensions', opts)
1473 1475 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1474 1476 isinternal = extensions.ismoduleinternal(extmod)
1475 1477 extsource = None
1476 1478
1477 1479 if util.safehasattr(extmod, '__file__'):
1478 1480 extsource = pycompat.fsencode(extmod.__file__)
1479 1481 elif getattr(sys, 'oxidized', False):
1480 1482 extsource = pycompat.sysexecutable
1481 1483 if isinternal:
1482 1484 exttestedwith = [] # never expose magic string to users
1483 1485 else:
1484 1486 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1485 1487 extbuglink = getattr(extmod, 'buglink', None)
1486 1488
1487 1489 fm.startitem()
1488 1490
1489 1491 if ui.quiet or ui.verbose:
1490 1492 fm.write(b'name', b'%s\n', extname)
1491 1493 else:
1492 1494 fm.write(b'name', b'%s', extname)
1493 1495 if isinternal or hgver in exttestedwith:
1494 1496 fm.plain(b'\n')
1495 1497 elif not exttestedwith:
1496 1498 fm.plain(_(b' (untested!)\n'))
1497 1499 else:
1498 1500 lasttestedversion = exttestedwith[-1]
1499 1501 fm.plain(b' (%s!)\n' % lasttestedversion)
1500 1502
1501 1503 fm.condwrite(
1502 1504 ui.verbose and extsource,
1503 1505 b'source',
1504 1506 _(b' location: %s\n'),
1505 1507 extsource or b"",
1506 1508 )
1507 1509
1508 1510 if ui.verbose:
1509 1511 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1510 1512 fm.data(bundled=isinternal)
1511 1513
1512 1514 fm.condwrite(
1513 1515 ui.verbose and exttestedwith,
1514 1516 b'testedwith',
1515 1517 _(b' tested with: %s\n'),
1516 1518 fm.formatlist(exttestedwith, name=b'ver'),
1517 1519 )
1518 1520
1519 1521 fm.condwrite(
1520 1522 ui.verbose and extbuglink,
1521 1523 b'buglink',
1522 1524 _(b' bug reporting: %s\n'),
1523 1525 extbuglink or b"",
1524 1526 )
1525 1527
1526 1528 fm.end()
1527 1529
1528 1530
1529 1531 @command(
1530 1532 b'debugfileset',
1531 1533 [
1532 1534 (
1533 1535 b'r',
1534 1536 b'rev',
1535 1537 b'',
1536 1538 _(b'apply the filespec on this revision'),
1537 1539 _(b'REV'),
1538 1540 ),
1539 1541 (
1540 1542 b'',
1541 1543 b'all-files',
1542 1544 False,
1543 1545 _(b'test files from all revisions and working directory'),
1544 1546 ),
1545 1547 (
1546 1548 b's',
1547 1549 b'show-matcher',
1548 1550 None,
1549 1551 _(b'print internal representation of matcher'),
1550 1552 ),
1551 1553 (
1552 1554 b'p',
1553 1555 b'show-stage',
1554 1556 [],
1555 1557 _(b'print parsed tree at the given stage'),
1556 1558 _(b'NAME'),
1557 1559 ),
1558 1560 ],
1559 1561 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1560 1562 )
1561 1563 def debugfileset(ui, repo, expr, **opts):
1562 1564 '''parse and apply a fileset specification'''
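# Example (a sketch; not from the original source):
#
#   hg debugfileset --show-stage all -r tip 'added()'
#
# prints the parsed, analyzed and optimized trees for the fileset expression
# and then the files of the tip revision that match it.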
1563 1565 from . import fileset
1564 1566
1565 1567 fileset.symbols # force import of fileset so we have predicates to optimize
1566 1568 opts = pycompat.byteskwargs(opts)
1567 1569 ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)
1568 1570
1569 1571 stages = [
1570 1572 (b'parsed', pycompat.identity),
1571 1573 (b'analyzed', filesetlang.analyze),
1572 1574 (b'optimized', filesetlang.optimize),
1573 1575 ]
1574 1576 stagenames = {n for n, f in stages}
1575 1577
1576 1578 showalways = set()
1577 1579 if ui.verbose and not opts[b'show_stage']:
1578 1580 # show parsed tree by --verbose (deprecated)
1579 1581 showalways.add(b'parsed')
1580 1582 if opts[b'show_stage'] == [b'all']:
1581 1583 showalways.update(stagenames)
1582 1584 else:
1583 1585 for n in opts[b'show_stage']:
1584 1586 if n not in stagenames:
1585 1587 raise error.Abort(_(b'invalid stage name: %s') % n)
1586 1588 showalways.update(opts[b'show_stage'])
1587 1589
1588 1590 tree = filesetlang.parse(expr)
1589 1591 for n, f in stages:
1590 1592 tree = f(tree)
1591 1593 if n in showalways:
1592 1594 if opts[b'show_stage'] or n != b'parsed':
1593 1595 ui.write(b"* %s:\n" % n)
1594 1596 ui.write(filesetlang.prettyformat(tree), b"\n")
1595 1597
1596 1598 files = set()
1597 1599 if opts[b'all_files']:
1598 1600 for r in repo:
1599 1601 c = repo[r]
1600 1602 files.update(c.files())
1601 1603 files.update(c.substate)
1602 1604 if opts[b'all_files'] or ctx.rev() is None:
1603 1605 wctx = repo[None]
1604 1606 files.update(
1605 1607 repo.dirstate.walk(
1606 1608 scmutil.matchall(repo),
1607 1609 subrepos=list(wctx.substate),
1608 1610 unknown=True,
1609 1611 ignored=True,
1610 1612 )
1611 1613 )
1612 1614 files.update(wctx.substate)
1613 1615 else:
1614 1616 files.update(ctx.files())
1615 1617 files.update(ctx.substate)
1616 1618
1617 1619 m = ctx.matchfileset(repo.getcwd(), expr)
1618 1620 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1619 1621 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1620 1622 for f in sorted(files):
1621 1623 if not m(f):
1622 1624 continue
1623 1625 ui.write(b"%s\n" % f)
1624 1626
1625 1627
1626 1628 @command(
1627 1629 b"debug-repair-issue6528",
1628 1630 [
1629 1631 (
1630 1632 b'',
1631 1633 b'to-report',
1632 1634 b'',
1633 1635 _(b'build a report of affected revisions to this file'),
1634 1636 _(b'FILE'),
1635 1637 ),
1636 1638 (
1637 1639 b'',
1638 1640 b'from-report',
1639 1641 b'',
1640 1642 _(b'repair revisions listed in this report file'),
1641 1643 _(b'FILE'),
1642 1644 ),
1643 1645 (
1644 1646 b'',
1645 1647 b'paranoid',
1646 1648 False,
1647 1649 _(b'check that both detection methods do the same thing'),
1648 1650 ),
1649 1651 ]
1650 1652 + cmdutil.dryrunopts,
1651 1653 )
1652 1654 def debug_repair_issue6528(ui, repo, **opts):
1653 1655 """find affected revisions and repair them. See issue6528 for more details.
1654 1656
1655 1657 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1656 1658 computation of affected revisions for a given repository across clones.
1657 1659 The report format is line-based (with empty lines ignored):
1658 1660
1659 1661 ```
1660 1662 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1661 1663 ```
1662 1664
1663 1665 There can be multiple broken revisions per filelog, they are separated by
1664 1666 a comma with no spaces. The only space is between the revision(s) and the
1665 1667 filename.
1666 1668
1667 1669 Note that this does *not* mean that this repairs future affected revisions;
1668 1670 that needs a separate fix at the exchange level that was introduced in
1669 1671 Mercurial 5.9.1.
1670 1672
1671 1673 There is a `--paranoid` flag to test that the fast implementation is correct
1672 1674 by checking it against the slow implementation. Since this matter is quite
1673 1675 urgent and testing every edge-case is probably quite costly, we use this
1674 1676 method to test on large repositories as a fuzzing method of sorts.
1675 1677 """
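# Hypothetical report line (illustration only, not real data): a single
# affected revision of the filelog for "foo.txt" would be recorded as
#
#   0123456789abcdef0123456789abcdef01234567 data/foo.txt.i
#
# with further affected revisions of the same filelog appended to the first
# field, separated by commas.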
1676 1678 cmdutil.check_incompatible_arguments(
1677 1679 opts, 'to_report', ['from_report', 'dry_run']
1678 1680 )
1679 1681 dry_run = opts.get('dry_run')
1680 1682 to_report = opts.get('to_report')
1681 1683 from_report = opts.get('from_report')
1682 1684 paranoid = opts.get('paranoid')
1683 1685 # TODO maybe add filelog pattern and revision pattern parameters to help
1684 1686 # narrow down the search for users that know what they're looking for?
1685 1687
1686 1688 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1687 1689 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1688 1690 raise error.Abort(_(msg))
1689 1691
1690 1692 rewrite.repair_issue6528(
1691 1693 ui,
1692 1694 repo,
1693 1695 dry_run=dry_run,
1694 1696 to_report=to_report,
1695 1697 from_report=from_report,
1696 1698 paranoid=paranoid,
1697 1699 )
1698 1700
1699 1701
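# A minimal sketch of parsing the report format described in the
# debug-repair-issue6528 docstring above: one line per filelog, a
# comma-separated list of affected node hashes, a single space, then the
# filelog index filename; empty lines are ignored.  The function name is
# hypothetical.
def parse_issue6528_report(text):
    entries = []
    for line in text.splitlines():
        if not line.strip():
            continue
        nodes, filename = line.split(' ', 1)
        entries.append((nodes.split(','), filename))
    return entries

# parse_issue6528_report("0123abcd...,4567ef01... data/foo.txt.i\n")
# -> [(['0123abcd...', '4567ef01...'], 'data/foo.txt.i')]
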
1700 1702 @command(b'debugformat', [] + cmdutil.formatteropts)
1701 1703 def debugformat(ui, repo, **opts):
1702 1704 """display format information about the current repository
1703 1705
1704 1706 Use --verbose to get extra information about current config value and
1705 1707 Mercurial default."""
1706 1708 opts = pycompat.byteskwargs(opts)
1707 1709 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1708 1710 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1709 1711
1710 1712 def makeformatname(name):
1711 1713 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1712 1714
1713 1715 fm = ui.formatter(b'debugformat', opts)
1714 1716 if fm.isplain():
1715 1717
1716 1718 def formatvalue(value):
1717 1719 if util.safehasattr(value, b'startswith'):
1718 1720 return value
1719 1721 if value:
1720 1722 return b'yes'
1721 1723 else:
1722 1724 return b'no'
1723 1725
1724 1726 else:
1725 1727 formatvalue = pycompat.identity
1726 1728
1727 1729 fm.plain(b'format-variant')
1728 1730 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1729 1731 fm.plain(b' repo')
1730 1732 if ui.verbose:
1731 1733 fm.plain(b' config default')
1732 1734 fm.plain(b'\n')
1733 1735 for fv in upgrade.allformatvariant:
1734 1736 fm.startitem()
1735 1737 repovalue = fv.fromrepo(repo)
1736 1738 configvalue = fv.fromconfig(repo)
1737 1739
1738 1740 if repovalue != configvalue:
1739 1741 namelabel = b'formatvariant.name.mismatchconfig'
1740 1742 repolabel = b'formatvariant.repo.mismatchconfig'
1741 1743 elif repovalue != fv.default:
1742 1744 namelabel = b'formatvariant.name.mismatchdefault'
1743 1745 repolabel = b'formatvariant.repo.mismatchdefault'
1744 1746 else:
1745 1747 namelabel = b'formatvariant.name.uptodate'
1746 1748 repolabel = b'formatvariant.repo.uptodate'
1747 1749
1748 1750 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1749 1751 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1750 1752 if fv.default != configvalue:
1751 1753 configlabel = b'formatvariant.config.special'
1752 1754 else:
1753 1755 configlabel = b'formatvariant.config.default'
1754 1756 fm.condwrite(
1755 1757 ui.verbose,
1756 1758 b'config',
1757 1759 b' %6s',
1758 1760 formatvalue(configvalue),
1759 1761 label=configlabel,
1760 1762 )
1761 1763 fm.condwrite(
1762 1764 ui.verbose,
1763 1765 b'default',
1764 1766 b' %7s',
1765 1767 formatvalue(fv.default),
1766 1768 label=b'formatvariant.default',
1767 1769 )
1768 1770 fm.plain(b'\n')
1769 1771 fm.end()
1770 1772
1771 1773
1772 1774 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1773 1775 def debugfsinfo(ui, path=b"."):
1774 1776 """show information detected about current filesystem"""
1775 1777 ui.writenoi18n(b'path: %s\n' % path)
1776 1778 ui.writenoi18n(
1777 1779 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1778 1780 )
1779 1781 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1780 1782 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1781 1783 ui.writenoi18n(
1782 1784 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1783 1785 )
1784 1786 ui.writenoi18n(
1785 1787 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1786 1788 )
1787 1789 casesensitive = b'(unknown)'
1788 1790 try:
1789 1791 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1790 1792 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1791 1793 except OSError:
1792 1794 pass
1793 1795 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1794 1796
1795 1797
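# A minimal, pure-Python sketch of the case-sensitivity probe debugfsinfo
# relies on (util.fscasesensitive): create a temporary file and check whether
# the same path with swapped case also resolves.  This is an approximation,
# not Mercurial's implementation.
import os
import tempfile

def probe_case_sensitive(directory="."):
    with tempfile.NamedTemporaryFile(prefix=".caseprobe", dir=directory) as f:
        swapped = os.path.join(
            os.path.dirname(f.name), os.path.basename(f.name).swapcase()
        )
        # On a case-insensitive filesystem the swapped-case path still exists.
        return not os.path.exists(swapped)
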
1796 1798 @command(
1797 1799 b'debuggetbundle',
1798 1800 [
1799 1801 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1800 1802 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1801 1803 (
1802 1804 b't',
1803 1805 b'type',
1804 1806 b'bzip2',
1805 1807 _(b'bundle compression type to use'),
1806 1808 _(b'TYPE'),
1807 1809 ),
1808 1810 ],
1809 1811 _(b'REPO FILE [-H|-C ID]...'),
1810 1812 norepo=True,
1811 1813 )
1812 1814 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1813 1815 """retrieves a bundle from a repo
1814 1816
1815 1817 Every ID must be a full-length hex node id string. Saves the bundle to the
1816 1818 given file.
1817 1819 """
1818 1820 opts = pycompat.byteskwargs(opts)
1819 1821 repo = hg.peer(ui, opts, repopath)
1820 1822 if not repo.capable(b'getbundle'):
1821 1823 raise error.Abort(b"getbundle() not supported by target repository")
1822 1824 args = {}
1823 1825 if common:
1824 1826 args['common'] = [bin(s) for s in common]
1825 1827 if head:
1826 1828 args['heads'] = [bin(s) for s in head]
1827 1829 # TODO: get desired bundlecaps from command line.
1828 1830 args['bundlecaps'] = None
1829 1831 bundle = repo.getbundle(b'debug', **args)
1830 1832
1831 1833 bundletype = opts.get(b'type', b'bzip2').lower()
1832 1834 btypes = {
1833 1835 b'none': b'HG10UN',
1834 1836 b'bzip2': b'HG10BZ',
1835 1837 b'gzip': b'HG10GZ',
1836 1838 b'bundle2': b'HG20',
1837 1839 }
1838 1840 bundletype = btypes.get(bundletype)
1839 1841 if bundletype not in bundle2.bundletypes:
1840 1842 raise error.Abort(_(b'unknown bundle type specified with --type'))
1841 1843 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1842 1844
1843 1845
1844 1846 @command(b'debugignore', [], b'[FILE]')
1845 1847 def debugignore(ui, repo, *files, **opts):
1846 1848 """display the combined ignore pattern and information about ignored files
1847 1849
1848 1850 With no argument display the combined ignore pattern.
1849 1851
1850 1852 Given space-separated file names, show whether each given file is ignored
1851 1853 and, if so, the ignore rule (file and line number) that matched it.
1852 1854 """
1853 1855 ignore = repo.dirstate._ignore
1854 1856 if not files:
1855 1857 # Show all the patterns
1856 1858 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1857 1859 else:
1858 1860 m = scmutil.match(repo[None], pats=files)
1859 1861 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1860 1862 for f in m.files():
1861 1863 nf = util.normpath(f)
1862 1864 ignored = None
1863 1865 ignoredata = None
1864 1866 if nf != b'.':
1865 1867 if ignore(nf):
1866 1868 ignored = nf
1867 1869 ignoredata = repo.dirstate._ignorefileandline(nf)
1868 1870 else:
1869 1871 for p in pathutil.finddirs(nf):
1870 1872 if ignore(p):
1871 1873 ignored = p
1872 1874 ignoredata = repo.dirstate._ignorefileandline(p)
1873 1875 break
1874 1876 if ignored:
1875 1877 if ignored == nf:
1876 1878 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1877 1879 else:
1878 1880 ui.write(
1879 1881 _(
1880 1882 b"%s is ignored because of "
1881 1883 b"containing directory %s\n"
1882 1884 )
1883 1885 % (uipathfn(f), ignored)
1884 1886 )
1885 1887 ignorefile, lineno, line = ignoredata
1886 1888 ui.write(
1887 1889 _(b"(ignore rule in %s, line %d: '%s')\n")
1888 1890 % (ignorefile, lineno, line)
1889 1891 )
1890 1892 else:
1891 1893 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1892 1894
1893 1895
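# A minimal sketch of the parent-directory walk debugignore performs: if the
# file itself is not ignored, check each ancestor directory and report the
# first one that matches.  `is_ignored` stands in for the repository's ignore
# matcher, and `parent_dirs` is a plain stand-in for pathutil.finddirs.
def parent_dirs(path):
    while "/" in path:
        path = path.rsplit("/", 1)[0]
        yield path

def find_ignore_source(path, is_ignored):
    if is_ignored(path):
        return path
    for d in parent_dirs(path):
        if is_ignored(d):
            return d
    return None

# find_ignore_source("build/tmp/a.o", lambda p: p == "build") -> "build"
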
1894 1896 @command(
1895 1897 b'debug-revlog-index|debugindex',
1896 1898 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1897 1899 _(b'-c|-m|FILE'),
1898 1900 )
1899 1901 def debugindex(ui, repo, file_=None, **opts):
1900 1902 """dump index data for a revlog"""
1901 1903 opts = pycompat.byteskwargs(opts)
1902 1904 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1903 1905
1904 1906 fm = ui.formatter(b'debugindex', opts)
1905 1907
1906 1908 revlog = getattr(store, b'_revlog', store)
1907 1909
1908 1910 return revlog_debug.debug_index(
1909 1911 ui,
1910 1912 repo,
1911 1913 formatter=fm,
1912 1914 revlog=revlog,
1913 1915 full_node=ui.debugflag,
1914 1916 )
1915 1917
1916 1918
1917 1919 @command(
1918 1920 b'debugindexdot',
1919 1921 cmdutil.debugrevlogopts,
1920 1922 _(b'-c|-m|FILE'),
1921 1923 optionalrepo=True,
1922 1924 )
1923 1925 def debugindexdot(ui, repo, file_=None, **opts):
1924 1926 """dump an index DAG as a graphviz dot file"""
1925 1927 opts = pycompat.byteskwargs(opts)
1926 1928 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1927 1929 ui.writenoi18n(b"digraph G {\n")
1928 1930 for i in r:
1929 1931 node = r.node(i)
1930 1932 pp = r.parents(node)
1931 1933 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1932 1934 if pp[1] != repo.nullid:
1933 1935 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1934 1936 ui.write(b"}\n")
1935 1937
1936 1938
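# A minimal sketch of the graphviz output debugindexdot produces: one
# "parent -> child" edge per revision, with the second-parent edge emitted
# only when it is not null (-1 here).  Input is (rev, p1rev, p2rev) tuples;
# the function name is hypothetical.
def to_dot(entries):
    lines = ["digraph G {"]
    for rev, p1, p2 in entries:
        lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)

# to_dot([(0, -1, -1), (1, 0, -1), (2, 1, 0)]) yields edges
# "-1 -> 0", "0 -> 1", "1 -> 2" and "0 -> 2".
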
1937 1939 @command(b'debugindexstats', [])
1938 1940 def debugindexstats(ui, repo):
1939 1941 """show stats related to the changelog index"""
1940 1942 repo.changelog.shortest(repo.nullid, 1)
1941 1943 index = repo.changelog.index
1942 1944 if not util.safehasattr(index, b'stats'):
1943 1945 raise error.Abort(_(b'debugindexstats only works with native code'))
1944 1946 for k, v in sorted(index.stats().items()):
1945 1947 ui.write(b'%s: %d\n' % (k, v))
1946 1948
1947 1949
1948 1950 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1949 1951 def debuginstall(ui, **opts):
1950 1952 """test Mercurial installation
1951 1953
1952 1954 Returns 0 on success.
1953 1955 """
1954 1956 opts = pycompat.byteskwargs(opts)
1955 1957
1956 1958 problems = 0
1957 1959
1958 1960 fm = ui.formatter(b'debuginstall', opts)
1959 1961 fm.startitem()
1960 1962
1961 1963 # encoding might be unknown or wrong. don't translate these messages.
1962 1964 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1963 1965 err = None
1964 1966 try:
1965 1967 codecs.lookup(pycompat.sysstr(encoding.encoding))
1966 1968 except LookupError as inst:
1967 1969 err = stringutil.forcebytestr(inst)
1968 1970 problems += 1
1969 1971 fm.condwrite(
1970 1972 err,
1971 1973 b'encodingerror',
1972 1974 b" %s\n (check that your locale is properly set)\n",
1973 1975 err,
1974 1976 )
1975 1977
1976 1978 # Python
1977 1979 pythonlib = None
1978 1980 if util.safehasattr(os, '__file__'):
1979 1981 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1980 1982 elif getattr(sys, 'oxidized', False):
1981 1983 pythonlib = pycompat.sysexecutable
1982 1984
1983 1985 fm.write(
1984 1986 b'pythonexe',
1985 1987 _(b"checking Python executable (%s)\n"),
1986 1988 pycompat.sysexecutable or _(b"unknown"),
1987 1989 )
1988 1990 fm.write(
1989 1991 b'pythonimplementation',
1990 1992 _(b"checking Python implementation (%s)\n"),
1991 1993 pycompat.sysbytes(platform.python_implementation()),
1992 1994 )
1993 1995 fm.write(
1994 1996 b'pythonver',
1995 1997 _(b"checking Python version (%s)\n"),
1996 1998 (b"%d.%d.%d" % sys.version_info[:3]),
1997 1999 )
1998 2000 fm.write(
1999 2001 b'pythonlib',
2000 2002 _(b"checking Python lib (%s)...\n"),
2001 2003 pythonlib or _(b"unknown"),
2002 2004 )
2003 2005
2004 2006 try:
2005 2007 from . import rustext # pytype: disable=import-error
2006 2008
2007 2009 rustext.__doc__ # trigger lazy import
2008 2010 except ImportError:
2009 2011 rustext = None
2010 2012
2011 2013 security = set(sslutil.supportedprotocols)
2012 2014 if sslutil.hassni:
2013 2015 security.add(b'sni')
2014 2016
2015 2017 fm.write(
2016 2018 b'pythonsecurity',
2017 2019 _(b"checking Python security support (%s)\n"),
2018 2020 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
2019 2021 )
2020 2022
2021 2023 # These are warnings, not errors. So don't increment problem count. This
2022 2024 # may change in the future.
2023 2025 if b'tls1.2' not in security:
2024 2026 fm.plain(
2025 2027 _(
2026 2028 b' TLS 1.2 not supported by Python install; '
2027 2029 b'network connections lack modern security\n'
2028 2030 )
2029 2031 )
2030 2032 if b'sni' not in security:
2031 2033 fm.plain(
2032 2034 _(
2033 2035 b' SNI not supported by Python install; may have '
2034 2036 b'connectivity issues with some servers\n'
2035 2037 )
2036 2038 )
2037 2039
2038 2040 fm.plain(
2039 2041 _(
2040 2042 b"checking Rust extensions (%s)\n"
2041 2043 % (b'missing' if rustext is None else b'installed')
2042 2044 ),
2043 2045 )
2044 2046
2045 2047 # TODO print CA cert info
2046 2048
2047 2049 # hg version
2048 2050 hgver = util.version()
2049 2051 fm.write(
2050 2052 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
2051 2053 )
2052 2054 fm.write(
2053 2055 b'hgverextra',
2054 2056 _(b"checking Mercurial custom build (%s)\n"),
2055 2057 b'+'.join(hgver.split(b'+')[1:]),
2056 2058 )
2057 2059
2058 2060 # compiled modules
2059 2061 hgmodules = None
2060 2062 if util.safehasattr(sys.modules[__name__], '__file__'):
2061 2063 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
2062 2064 elif getattr(sys, 'oxidized', False):
2063 2065 hgmodules = pycompat.sysexecutable
2064 2066
2065 2067 fm.write(
2066 2068 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
2067 2069 )
2068 2070 fm.write(
2069 2071 b'hgmodules',
2070 2072 _(b"checking installed modules (%s)...\n"),
2071 2073 hgmodules or _(b"unknown"),
2072 2074 )
2073 2075
2074 2076 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
2075 2077 rustext = rustandc # for now, that's the only case
2076 2078 cext = policy.policy in (b'c', b'allow') or rustandc
2077 2079 nopure = cext or rustext
2078 2080 if nopure:
2079 2081 err = None
2080 2082 try:
2081 2083 if cext:
2082 2084 from .cext import ( # pytype: disable=import-error
2083 2085 base85,
2084 2086 bdiff,
2085 2087 mpatch,
2086 2088 osutil,
2087 2089 )
2088 2090
2089 2091 # quiet pyflakes
2090 2092 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
2091 2093 if rustext:
2092 2094 from .rustext import ( # pytype: disable=import-error
2093 2095 ancestor,
2094 2096 dirstate,
2095 2097 )
2096 2098
2097 2099 dir(ancestor), dir(dirstate) # quiet pyflakes
2098 2100 except Exception as inst:
2099 2101 err = stringutil.forcebytestr(inst)
2100 2102 problems += 1
2101 2103 fm.condwrite(err, b'extensionserror', b" %s\n", err)
2102 2104
2103 2105 compengines = util.compengines._engines.values()
2104 2106 fm.write(
2105 2107 b'compengines',
2106 2108 _(b'checking registered compression engines (%s)\n'),
2107 2109 fm.formatlist(
2108 2110 sorted(e.name() for e in compengines),
2109 2111 name=b'compengine',
2110 2112 fmt=b'%s',
2111 2113 sep=b', ',
2112 2114 ),
2113 2115 )
2114 2116 fm.write(
2115 2117 b'compenginesavail',
2116 2118 _(b'checking available compression engines (%s)\n'),
2117 2119 fm.formatlist(
2118 2120 sorted(e.name() for e in compengines if e.available()),
2119 2121 name=b'compengine',
2120 2122 fmt=b'%s',
2121 2123 sep=b', ',
2122 2124 ),
2123 2125 )
2124 2126 wirecompengines = compression.compengines.supportedwireengines(
2125 2127 compression.SERVERROLE
2126 2128 )
2127 2129 fm.write(
2128 2130 b'compenginesserver',
2129 2131 _(
2130 2132 b'checking available compression engines '
2131 2133 b'for wire protocol (%s)\n'
2132 2134 ),
2133 2135 fm.formatlist(
2134 2136 [e.name() for e in wirecompengines if e.wireprotosupport()],
2135 2137 name=b'compengine',
2136 2138 fmt=b'%s',
2137 2139 sep=b', ',
2138 2140 ),
2139 2141 )
2140 2142 re2 = b'missing'
2141 2143 if util._re2:
2142 2144 re2 = b'available'
2143 2145 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2144 2146 fm.data(re2=bool(util._re2))
2145 2147
2146 2148 # templates
2147 2149 p = templater.templatedir()
2148 2150 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2149 2151 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2150 2152 if p:
2151 2153 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2152 2154 if m:
2153 2155 # template found, check if it is working
2154 2156 err = None
2155 2157 try:
2156 2158 templater.templater.frommapfile(m)
2157 2159 except Exception as inst:
2158 2160 err = stringutil.forcebytestr(inst)
2159 2161 p = None
2160 2162 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2161 2163 else:
2162 2164 p = None
2163 2165 fm.condwrite(
2164 2166 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2165 2167 )
2166 2168 fm.condwrite(
2167 2169 not m,
2168 2170 b'defaulttemplatenotfound',
2169 2171 _(b" template '%s' not found\n"),
2170 2172 b"default",
2171 2173 )
2172 2174 if not p:
2173 2175 problems += 1
2174 2176 fm.condwrite(
2175 2177 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2176 2178 )
2177 2179
2178 2180 # editor
2179 2181 editor = ui.geteditor()
2180 2182 editor = util.expandpath(editor)
2181 2183 editorbin = procutil.shellsplit(editor)[0]
2182 2184 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2183 2185 cmdpath = procutil.findexe(editorbin)
2184 2186 fm.condwrite(
2185 2187 not cmdpath and editor == b'vi',
2186 2188 b'vinotfound',
2187 2189 _(
2188 2190 b" No commit editor set and can't find %s in PATH\n"
2189 2191 b" (specify a commit editor in your configuration"
2190 2192 b" file)\n"
2191 2193 ),
2192 2194 not cmdpath and editor == b'vi' and editorbin,
2193 2195 )
2194 2196 fm.condwrite(
2195 2197 not cmdpath and editor != b'vi',
2196 2198 b'editornotfound',
2197 2199 _(
2198 2200 b" Can't find editor '%s' in PATH\n"
2199 2201 b" (specify a commit editor in your configuration"
2200 2202 b" file)\n"
2201 2203 ),
2202 2204 not cmdpath and editorbin,
2203 2205 )
2204 2206 if not cmdpath and editor != b'vi':
2205 2207 problems += 1
2206 2208
2207 2209 # check username
2208 2210 username = None
2209 2211 err = None
2210 2212 try:
2211 2213 username = ui.username()
2212 2214 except error.Abort as e:
2213 2215 err = e.message
2214 2216 problems += 1
2215 2217
2216 2218 fm.condwrite(
2217 2219 username, b'username', _(b"checking username (%s)\n"), username
2218 2220 )
2219 2221 fm.condwrite(
2220 2222 err,
2221 2223 b'usernameerror',
2222 2224 _(
2223 2225 b"checking username...\n %s\n"
2224 2226 b" (specify a username in your configuration file)\n"
2225 2227 ),
2226 2228 err,
2227 2229 )
2228 2230
2229 2231 for name, mod in extensions.extensions():
2230 2232 handler = getattr(mod, 'debuginstall', None)
2231 2233 if handler is not None:
2232 2234 problems += handler(ui, fm)
2233 2235
2234 2236 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2235 2237 if not problems:
2236 2238 fm.data(problems=problems)
2237 2239 fm.condwrite(
2238 2240 problems,
2239 2241 b'problems',
2240 2242 _(b"%d problems detected, please check your install!\n"),
2241 2243 problems,
2242 2244 )
2243 2245 fm.end()
2244 2246
2245 2247 return problems
2246 2248
2247 2249
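# A minimal sketch of the first check debuginstall performs above: verify that
# the configured character encoding is one Python actually knows about,
# returning an error string in the same spirit as the command's output.
import codecs

def check_encoding(name):
    try:
        codecs.lookup(name)
        return None
    except LookupError as exc:
        return "%s (check that your locale is properly set)" % exc

# check_encoding("utf-8") -> None
# check_encoding("no-such-codec") -> "unknown encoding: ... (check that ...)"
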
2248 2250 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2249 2251 def debugknown(ui, repopath, *ids, **opts):
2250 2252 """test whether node ids are known to a repo
2251 2253
2252 2254 Every ID must be a full-length hex node id string. Returns a list of 0s
2253 2255 and 1s indicating unknown/known.
2254 2256 """
2255 2257 opts = pycompat.byteskwargs(opts)
2256 2258 repo = hg.peer(ui, opts, repopath)
2257 2259 if not repo.capable(b'known'):
2258 2260 raise error.Abort(b"known() not supported by target repository")
2259 2261 flags = repo.known([bin(s) for s in ids])
2260 2262 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2261 2263
2262 2264
2263 2265 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2264 2266 def debuglabelcomplete(ui, repo, *args):
2265 2267 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2266 2268 debugnamecomplete(ui, repo, *args)
2267 2269
2268 2270
2269 2271 @command(
2270 2272 b'debuglocks',
2271 2273 [
2272 2274 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2273 2275 (
2274 2276 b'W',
2275 2277 b'force-free-wlock',
2276 2278 None,
2277 2279 _(b'free the working state lock (DANGEROUS)'),
2278 2280 ),
2279 2281 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2280 2282 (
2281 2283 b'S',
2282 2284 b'set-wlock',
2283 2285 None,
2284 2286 _(b'set the working state lock until stopped'),
2285 2287 ),
2286 2288 ],
2287 2289 _(b'[OPTION]...'),
2288 2290 )
2289 2291 def debuglocks(ui, repo, **opts):
2290 2292 """show or modify state of locks
2291 2293
2292 2294 By default, this command will show which locks are held. This
2293 2295 includes the user and process holding the lock, the amount of time
2294 2296 the lock has been held, and the machine name where the process is
2295 2297 running if it's not local.
2296 2298
2297 2299 Locks protect the integrity of Mercurial's data, so should be
2298 2300 treated with care. System crashes or other interruptions may cause
2299 2301 locks to not be properly released, though Mercurial will usually
2300 2302 detect and remove such stale locks automatically.
2301 2303
2302 2304 However, detecting stale locks may not always be possible (for
2303 2305 instance, on a shared filesystem). Removing locks may also be
2304 2306 blocked by filesystem permissions.
2305 2307
2306 2308 Setting a lock will prevent other commands from changing the data.
2307 2309 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2308 2310 The set locks are removed when the command exits.
2309 2311
2310 2312 Returns 0 if no locks are held.
2311 2313
2312 2314 """
2313 2315
2314 2316 if opts.get('force_free_lock'):
2315 2317 repo.svfs.tryunlink(b'lock')
2316 2318 if opts.get('force_free_wlock'):
2317 2319 repo.vfs.tryunlink(b'wlock')
2318 2320 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2319 2321 return 0
2320 2322
2321 2323 locks = []
2322 2324 try:
2323 2325 if opts.get('set_wlock'):
2324 2326 try:
2325 2327 locks.append(repo.wlock(False))
2326 2328 except error.LockHeld:
2327 2329 raise error.Abort(_(b'wlock is already held'))
2328 2330 if opts.get('set_lock'):
2329 2331 try:
2330 2332 locks.append(repo.lock(False))
2331 2333 except error.LockHeld:
2332 2334 raise error.Abort(_(b'lock is already held'))
2333 2335 if len(locks):
2334 2336 try:
2335 2337 if ui.interactive():
2336 2338 prompt = _(b"ready to release the lock (y)? $$ &Yes")
2337 2339 ui.promptchoice(prompt)
2338 2340 else:
2339 2341 msg = b"%d locks held, waiting for signal\n"
2340 2342 msg %= len(locks)
2341 2343 ui.status(msg)
2342 2344 while True: # XXX wait for a signal
2343 2345 time.sleep(0.1)
2344 2346 except KeyboardInterrupt:
2345 2347 msg = b"signal-received releasing locks\n"
2346 2348 ui.status(msg)
2347 2349 return 0
2348 2350 finally:
2349 2351 release(*locks)
2350 2352
2351 2353 now = time.time()
2352 2354 held = 0
2353 2355
2354 2356 def report(vfs, name, method):
2355 2357 # this causes stale locks to get reaped for more accurate reporting
2356 2358 try:
2357 2359 l = method(False)
2358 2360 except error.LockHeld:
2359 2361 l = None
2360 2362
2361 2363 if l:
2362 2364 l.release()
2363 2365 else:
2364 2366 try:
2365 2367 st = vfs.lstat(name)
2366 2368 age = now - st[stat.ST_MTIME]
2367 2369 user = util.username(st.st_uid)
2368 2370 locker = vfs.readlock(name)
2369 2371 if b":" in locker:
2370 2372 host, pid = locker.split(b':')
2371 2373 if host == socket.gethostname():
2372 2374 locker = b'user %s, process %s' % (user or b'None', pid)
2373 2375 else:
2374 2376 locker = b'user %s, process %s, host %s' % (
2375 2377 user or b'None',
2376 2378 pid,
2377 2379 host,
2378 2380 )
2379 2381 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2380 2382 return 1
2381 2383 except FileNotFoundError:
2382 2384 pass
2383 2385
2384 2386 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2385 2387 return 0
2386 2388
2387 2389 held += report(repo.svfs, b"lock", repo.lock)
2388 2390 held += report(repo.vfs, b"wlock", repo.wlock)
2389 2391
2390 2392 return held
2391 2393
2392 2394
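# A minimal sketch of how debuglocks interprets a lock file's contents: a
# "host:pid" string, rendered differently depending on whether the host is
# the local machine.  `localhost_name` stands in for socket.gethostname().
def describe_locker(locker, user, localhost_name):
    if ":" not in locker:
        return locker
    host, pid = locker.split(":", 1)
    if host == localhost_name:
        return "user %s, process %s" % (user or "None", pid)
    return "user %s, process %s, host %s" % (user or "None", pid, host)

# describe_locker("apollo:4242", "alice", "apollo")
# -> "user alice, process 4242"
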
2393 2395 @command(
2394 2396 b'debugmanifestfulltextcache',
2395 2397 [
2396 2398 (b'', b'clear', False, _(b'clear the cache')),
2397 2399 (
2398 2400 b'a',
2399 2401 b'add',
2400 2402 [],
2401 2403 _(b'add the given manifest nodes to the cache'),
2402 2404 _(b'NODE'),
2403 2405 ),
2404 2406 ],
2405 2407 b'',
2406 2408 )
2407 2409 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2408 2410 """show, clear or amend the contents of the manifest fulltext cache"""
2409 2411
2410 2412 def getcache():
2411 2413 r = repo.manifestlog.getstorage(b'')
2412 2414 try:
2413 2415 return r._fulltextcache
2414 2416 except AttributeError:
2415 2417 msg = _(
2416 2418 b"Current revlog implementation doesn't appear to have a "
2417 2419 b"manifest fulltext cache\n"
2418 2420 )
2419 2421 raise error.Abort(msg)
2420 2422
2421 2423 if opts.get('clear'):
2422 2424 with repo.wlock():
2423 2425 cache = getcache()
2424 2426 cache.clear(clear_persisted_data=True)
2425 2427 return
2426 2428
2427 2429 if add:
2428 2430 with repo.wlock():
2429 2431 m = repo.manifestlog
2430 2432 store = m.getstorage(b'')
2431 2433 for n in add:
2432 2434 try:
2433 2435 manifest = m[store.lookup(n)]
2434 2436 except error.LookupError as e:
2435 2437 raise error.Abort(
2436 2438 bytes(e), hint=b"Check your manifest node id"
2437 2439 )
2438 2440 manifest.read() # stores revision in cache too
2439 2441 return
2440 2442
2441 2443 cache = getcache()
2442 2444 if not len(cache):
2443 2445 ui.write(_(b'cache empty\n'))
2444 2446 else:
2445 2447 ui.write(
2446 2448 _(
2447 2449 b'cache contains %d manifest entries, in order of most to '
2448 2450 b'least recent:\n'
2449 2451 )
2450 2452 % (len(cache),)
2451 2453 )
2452 2454 totalsize = 0
2453 2455 for nodeid in cache:
2454 2456 # Use cache.peek to not update the LRU order
2455 2457 data = cache.peek(nodeid)
2456 2458 size = len(data)
2457 2459 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2458 2460 ui.write(
2459 2461 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2460 2462 )
2461 2463 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2462 2464 ui.write(
2463 2465 _(b'total cache data size %s, on-disk %s\n')
2464 2466 % (util.bytecount(totalsize), util.bytecount(ondisk))
2465 2467 )
2466 2468
2467 2469
2468 2470 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2469 2471 def debugmergestate(ui, repo, *args, **opts):
2470 2472 """print merge state
2471 2473
2472 2474 Use --verbose to print out information about whether v1 or v2 merge state
2473 2475 was chosen."""
2474 2476
2475 2477 if ui.verbose:
2476 2478 ms = mergestatemod.mergestate(repo)
2477 2479
2478 2480 # sort so that reasonable information is on top
2479 2481 v1records = ms._readrecordsv1()
2480 2482 v2records = ms._readrecordsv2()
2481 2483
2482 2484 if not v1records and not v2records:
2483 2485 pass
2484 2486 elif not v2records:
2485 2487 ui.writenoi18n(b'no version 2 merge state\n')
2486 2488 elif ms._v1v2match(v1records, v2records):
2487 2489 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2488 2490 else:
2489 2491 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2490 2492
2491 2493 opts = pycompat.byteskwargs(opts)
2492 2494 if not opts[b'template']:
2493 2495 opts[b'template'] = (
2494 2496 b'{if(commits, "", "no merge state found\n")}'
2495 2497 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2496 2498 b'{files % "file: {path} (state \\"{state}\\")\n'
2497 2499 b'{if(local_path, "'
2498 2500 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2499 2501 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2500 2502 b' other path: {other_path} (node {other_node})\n'
2501 2503 b'")}'
2502 2504 b'{if(rename_side, "'
2503 2505 b' rename side: {rename_side}\n'
2504 2506 b' renamed path: {renamed_path}\n'
2505 2507 b'")}'
2506 2508 b'{extras % " extra: {key} = {value}\n"}'
2507 2509 b'"}'
2508 2510 b'{extras % "extra: {file} ({key} = {value})\n"}'
2509 2511 )
2510 2512
2511 2513 ms = mergestatemod.mergestate.read(repo)
2512 2514
2513 2515 fm = ui.formatter(b'debugmergestate', opts)
2514 2516 fm.startitem()
2515 2517
2516 2518 fm_commits = fm.nested(b'commits')
2517 2519 if ms.active():
2518 2520 for name, node, label_index in (
2519 2521 (b'local', ms.local, 0),
2520 2522 (b'other', ms.other, 1),
2521 2523 ):
2522 2524 fm_commits.startitem()
2523 2525 fm_commits.data(name=name)
2524 2526 fm_commits.data(node=hex(node))
2525 2527 if ms._labels and len(ms._labels) > label_index:
2526 2528 fm_commits.data(label=ms._labels[label_index])
2527 2529 fm_commits.end()
2528 2530
2529 2531 fm_files = fm.nested(b'files')
2530 2532 if ms.active():
2531 2533 for f in ms:
2532 2534 fm_files.startitem()
2533 2535 fm_files.data(path=f)
2534 2536 state = ms._state[f]
2535 2537 fm_files.data(state=state[0])
2536 2538 if state[0] in (
2537 2539 mergestatemod.MERGE_RECORD_UNRESOLVED,
2538 2540 mergestatemod.MERGE_RECORD_RESOLVED,
2539 2541 ):
2540 2542 fm_files.data(local_key=state[1])
2541 2543 fm_files.data(local_path=state[2])
2542 2544 fm_files.data(ancestor_path=state[3])
2543 2545 fm_files.data(ancestor_node=state[4])
2544 2546 fm_files.data(other_path=state[5])
2545 2547 fm_files.data(other_node=state[6])
2546 2548 fm_files.data(local_flags=state[7])
2547 2549 elif state[0] in (
2548 2550 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2549 2551 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2550 2552 ):
2551 2553 fm_files.data(renamed_path=state[1])
2552 2554 fm_files.data(rename_side=state[2])
2553 2555 fm_extras = fm_files.nested(b'extras')
2554 2556 for k, v in sorted(ms.extras(f).items()):
2555 2557 fm_extras.startitem()
2556 2558 fm_extras.data(key=k)
2557 2559 fm_extras.data(value=v)
2558 2560 fm_extras.end()
2559 2561
2560 2562 fm_files.end()
2561 2563
2562 2564 fm_extras = fm.nested(b'extras')
2563 2565 for f, d in sorted(ms.allextras().items()):
2564 2566 if f in ms:
2565 2567 # If file is in mergestate, we have already processed its extras
2566 2568 continue
2567 2569 for k, v in d.items():
2568 2570 fm_extras.startitem()
2569 2571 fm_extras.data(file=f)
2570 2572 fm_extras.data(key=k)
2571 2573 fm_extras.data(value=v)
2572 2574 fm_extras.end()
2573 2575
2574 2576 fm.end()
2575 2577
2576 2578
2577 2579 @command(b'debugnamecomplete', [], _(b'NAME...'))
2578 2580 def debugnamecomplete(ui, repo, *args):
2579 2581 '''complete "names" - tags, open branch names, bookmark names'''
2580 2582
2581 2583 names = set()
2582 2584 # since we previously only listed open branches, we will handle that
2583 2585 # specially (after this for loop)
2584 2586 for name, ns in repo.names.items():
2585 2587 if name != b'branches':
2586 2588 names.update(ns.listnames(repo))
2587 2589 names.update(
2588 2590 tag
2589 2591 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2590 2592 if not closed
2591 2593 )
2592 2594 completions = set()
2593 2595 if not args:
2594 2596 args = [b'']
2595 2597 for a in args:
2596 2598 completions.update(n for n in names if n.startswith(a))
2597 2599 ui.write(b'\n'.join(sorted(completions)))
2598 2600 ui.write(b'\n')
2599 2601
2600 2602
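# A minimal sketch of the completion step at the end of debugnamecomplete:
# given a set of known names and zero or more prefixes, emit every name
# matching any prefix, sorted, one per line.
def complete_names(names, prefixes):
    if not prefixes:
        prefixes = [""]
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in names if n.startswith(prefix))
    return "\n".join(sorted(matches))

# complete_names({"default", "dev", "stable"}, ["d"]) -> "default\ndev"
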
2601 2603 @command(
2602 2604 b'debugnodemap',
2603 [
2604 (
2605 b'',
2606 b'dump-new',
2607 False,
2608 _(b'write a (new) persistent binary nodemap on stdout'),
2609 ),
2610 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2611 (
2612 b'',
2613 b'check',
2614 False,
2615 _(b'check that the data on disk are correct.'),
2616 ),
2617 (
2618 b'',
2619 b'metadata',
2620 False,
2621 _(b'display the on disk meta data for the nodemap'),
2622 ),
2623 ],
2605 (
2606 cmdutil.debugrevlogopts
2607 + [
2608 (
2609 b'',
2610 b'dump-new',
2611 False,
2612 _(b'write a (new) persistent binary nodemap on stdout'),
2613 ),
2614 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2615 (
2616 b'',
2617 b'check',
2618 False,
2619 _(b'check that the data on disk are correct.'),
2620 ),
2621 (
2622 b'',
2623 b'metadata',
2624 False,
2625 _(b'display the on disk meta data for the nodemap'),
2626 ),
2627 ]
2628 ),
2629 _(b'-c|-m|FILE REV'),
2624 2630 )
2625 def debugnodemap(ui, repo, **opts):
2631 def debugnodemap(ui, repo, file_=None, **opts):
2626 2632 """write and inspect the on-disk nodemap"""
2633 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
2634 if file_ is not None:
2635 raise error.CommandError(b'debugnodemap', _(b'invalid arguments'))
2636 elif file_ is None:
2637 opts['changelog'] = True
2638 r = cmdutil.openstorage(
2639 repo.unfiltered(), b'debugnodemap', file_, pycompat.byteskwargs(opts)
2640 )
2641 if isinstance(r, manifest.manifestrevlog) or isinstance(r, filelog.filelog):
2642 r = r._revlog
2627 2643 if opts['dump_new']:
2628 unfi = repo.unfiltered()
2629 cl = unfi.changelog
2630 if util.safehasattr(cl.index, "nodemap_data_all"):
2631 data = cl.index.nodemap_data_all()
2644 if util.safehasattr(r.index, "nodemap_data_all"):
2645 data = r.index.nodemap_data_all()
2632 2646 else:
2633 data = nodemap.persistent_data(cl.index)
2647 data = nodemap.persistent_data(r.index)
2634 2648 ui.write(data)
2635 2649 elif opts['dump_disk']:
2636 unfi = repo.unfiltered()
2637 cl = unfi.changelog
2638 nm_data = nodemap.persisted_data(cl)
2650 nm_data = nodemap.persisted_data(r)
2639 2651 if nm_data is not None:
2640 2652 docket, data = nm_data
2641 2653 ui.write(data[:])
2642 2654 elif opts['check']:
2643 unfi = repo.unfiltered()
2644 cl = unfi.changelog
2645 nm_data = nodemap.persisted_data(cl)
2655 nm_data = nodemap.persisted_data(r)
2646 2656 if nm_data is not None:
2647 2657 docket, data = nm_data
2648 return nodemap.check_data(ui, cl.index, data)
2658 return nodemap.check_data(ui, r.index, data)
2649 2659 elif opts['metadata']:
2650 unfi = repo.unfiltered()
2651 cl = unfi.changelog
2652 nm_data = nodemap.persisted_data(cl)
2660 nm_data = nodemap.persisted_data(r)
2653 2661 if nm_data is not None:
2654 2662 docket, data = nm_data
2655 2663 ui.write((b"uid: %s\n") % docket.uid)
2656 2664 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2657 2665 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2658 2666 ui.write((b"data-length: %d\n") % docket.data_length)
2659 2667 ui.write((b"data-unused: %d\n") % docket.data_unused)
2660 2668 unused_perc = docket.data_unused * 100.0 / docket.data_length
2661 2669 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2662 2670
2663 2671
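# A minimal sketch of the storage unwrapping the updated debugnodemap relies
# on: manifest and filelog objects keep their revlog in an `_revlog`
# attribute, while the changelog already is a revlog, so fall back to the
# object itself.  This mirrors the getattr pattern used by debugindex above
# and is illustrative only.
def underlying_revlog(store):
    return getattr(store, "_revlog", store)

# Invocations this change is meant to enable (assumed forms, besides the
# default changelog): `hg debugnodemap -m --metadata` or
# `hg debugnodemap FILE --check`.
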
2664 2672 @command(
2665 2673 b'debugobsolete',
2666 2674 [
2667 2675 (b'', b'flags', 0, _(b'markers flag')),
2668 2676 (
2669 2677 b'',
2670 2678 b'record-parents',
2671 2679 False,
2672 2680 _(b'record parent information for the precursor'),
2673 2681 ),
2674 2682 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2675 2683 (
2676 2684 b'',
2677 2685 b'exclusive',
2678 2686 False,
2679 2687 _(b'restrict display to markers only relevant to REV'),
2680 2688 ),
2681 2689 (b'', b'index', False, _(b'display index of the marker')),
2682 2690 (b'', b'delete', [], _(b'delete markers specified by indices')),
2683 2691 ]
2684 2692 + cmdutil.commitopts2
2685 2693 + cmdutil.formatteropts,
2686 2694 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2687 2695 )
2688 2696 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2689 2697 """create arbitrary obsolete marker
2690 2698
2691 2699 With no arguments, displays the list of obsolescence markers."""
2692 2700
2693 2701 opts = pycompat.byteskwargs(opts)
2694 2702
2695 2703 def parsenodeid(s):
2696 2704 try:
2697 2705 # We do not use revsingle/revrange functions here to accept
2698 2706 # arbitrary node identifiers, possibly not present in the
2699 2707 # local repository.
2700 2708 n = bin(s)
2701 2709 if len(n) != repo.nodeconstants.nodelen:
2702 2710 raise ValueError
2703 2711 return n
2704 2712 except ValueError:
2705 2713 raise error.InputError(
2706 2714 b'changeset references must be full hexadecimal '
2707 2715 b'node identifiers'
2708 2716 )
2709 2717
2710 2718 if opts.get(b'delete'):
2711 2719 indices = []
2712 2720 for v in opts.get(b'delete'):
2713 2721 try:
2714 2722 indices.append(int(v))
2715 2723 except ValueError:
2716 2724 raise error.InputError(
2717 2725 _(b'invalid index value: %r') % v,
2718 2726 hint=_(b'use integers for indices'),
2719 2727 )
2720 2728
2721 2729 if repo.currenttransaction():
2722 2730 raise error.Abort(
2723 2731 _(b'cannot delete obsmarkers in the middle of transaction.')
2724 2732 )
2725 2733
2726 2734 with repo.lock():
2727 2735 n = repair.deleteobsmarkers(repo.obsstore, indices)
2728 2736 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2729 2737
2730 2738 return
2731 2739
2732 2740 if precursor is not None:
2733 2741 if opts[b'rev']:
2734 2742 raise error.InputError(
2735 2743 b'cannot select revision when creating marker'
2736 2744 )
2737 2745 metadata = {}
2738 2746 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2739 2747 succs = tuple(parsenodeid(succ) for succ in successors)
2740 2748 l = repo.lock()
2741 2749 try:
2742 2750 tr = repo.transaction(b'debugobsolete')
2743 2751 try:
2744 2752 date = opts.get(b'date')
2745 2753 if date:
2746 2754 date = dateutil.parsedate(date)
2747 2755 else:
2748 2756 date = None
2749 2757 prec = parsenodeid(precursor)
2750 2758 parents = None
2751 2759 if opts[b'record_parents']:
2752 2760 if prec not in repo.unfiltered():
2753 2761 raise error.Abort(
2754 2762 b'cannot use --record-parents on '
2755 2763 b'unknown changesets'
2756 2764 )
2757 2765 parents = repo.unfiltered()[prec].parents()
2758 2766 parents = tuple(p.node() for p in parents)
2759 2767 repo.obsstore.create(
2760 2768 tr,
2761 2769 prec,
2762 2770 succs,
2763 2771 opts[b'flags'],
2764 2772 parents=parents,
2765 2773 date=date,
2766 2774 metadata=metadata,
2767 2775 ui=ui,
2768 2776 )
2769 2777 tr.close()
2770 2778 except ValueError as exc:
2771 2779 raise error.Abort(
2772 2780 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2773 2781 )
2774 2782 finally:
2775 2783 tr.release()
2776 2784 finally:
2777 2785 l.release()
2778 2786 else:
2779 2787 if opts[b'rev']:
2780 2788 revs = logcmdutil.revrange(repo, opts[b'rev'])
2781 2789 nodes = [repo[r].node() for r in revs]
2782 2790 markers = list(
2783 2791 obsutil.getmarkers(
2784 2792 repo, nodes=nodes, exclusive=opts[b'exclusive']
2785 2793 )
2786 2794 )
2787 2795 markers.sort(key=lambda x: x._data)
2788 2796 else:
2789 2797 markers = obsutil.getmarkers(repo)
2790 2798
2791 2799 markerstoiter = markers
2792 2800 isrelevant = lambda m: True
2793 2801 if opts.get(b'rev') and opts.get(b'index'):
2794 2802 markerstoiter = obsutil.getmarkers(repo)
2795 2803 markerset = set(markers)
2796 2804 isrelevant = lambda m: m in markerset
2797 2805
2798 2806 fm = ui.formatter(b'debugobsolete', opts)
2799 2807 for i, m in enumerate(markerstoiter):
2800 2808 if not isrelevant(m):
2801 2809 # marker can be irrelevant when we're iterating over a set
2802 2810 # of markers (markerstoiter) which is bigger than the set
2803 2811 # of markers we want to display (markers)
2804 2812 # this can happen if both --index and --rev options are
2805 2813 # provided and thus we need to iterate over all of the markers
2806 2814 # to get the correct indices, but only display the ones that
2807 2815 # are relevant to --rev value
2808 2816 continue
2809 2817 fm.startitem()
2810 2818 ind = i if opts.get(b'index') else None
2811 2819 cmdutil.showmarker(fm, m, index=ind)
2812 2820 fm.end()
2813 2821
2814 2822
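# A minimal sketch of the node-id validation debugobsolete performs: the
# argument must decode cleanly from hexadecimal and be full length
# (20 binary bytes for SHA-1 hashes).
import binascii

def parse_node_id(s, nodelen=20):
    try:
        n = binascii.unhexlify(s)
    except (binascii.Error, ValueError):
        n = None
    if n is None or len(n) != nodelen:
        raise ValueError(
            "changeset references must be full hexadecimal node identifiers"
        )
    return n
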
2815 2823 @command(
2816 2824 b'debugp1copies',
2817 2825 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2818 2826 _(b'[-r REV]'),
2819 2827 )
2820 2828 def debugp1copies(ui, repo, **opts):
2821 2829 """dump copy information compared to p1"""
2822 2830
2823 2831 opts = pycompat.byteskwargs(opts)
2824 2832 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2825 2833 for dst, src in ctx.p1copies().items():
2826 2834 ui.write(b'%s -> %s\n' % (src, dst))
2827 2835
2828 2836
2829 2837 @command(
2830 2838 b'debugp2copies',
2831 2839 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2832 2840 _(b'[-r REV]'),
2833 2841 )
2834 2842 def debugp2copies(ui, repo, **opts):
2835 2843 """dump copy information compared to p2"""
2836 2844
2837 2845 opts = pycompat.byteskwargs(opts)
2838 2846 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2839 2847 for dst, src in ctx.p2copies().items():
2840 2848 ui.write(b'%s -> %s\n' % (src, dst))
2841 2849
2842 2850
2843 2851 @command(
2844 2852 b'debugpathcomplete',
2845 2853 [
2846 2854 (b'f', b'full', None, _(b'complete an entire path')),
2847 2855 (b'n', b'normal', None, _(b'show only normal files')),
2848 2856 (b'a', b'added', None, _(b'show only added files')),
2849 2857 (b'r', b'removed', None, _(b'show only removed files')),
2850 2858 ],
2851 2859 _(b'FILESPEC...'),
2852 2860 )
2853 2861 def debugpathcomplete(ui, repo, *specs, **opts):
2854 2862 """complete part or all of a tracked path
2855 2863
2856 2864 This command supports shells that offer path name completion. It
2857 2865 currently completes only files already known to the dirstate.
2858 2866
2859 2867 Completion extends only to the next path segment unless
2860 2868 --full is specified, in which case entire paths are used."""
2861 2869
2862 2870 def complete(path, acceptable):
2863 2871 dirstate = repo.dirstate
2864 2872 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2865 2873 rootdir = repo.root + pycompat.ossep
2866 2874 if spec != repo.root and not spec.startswith(rootdir):
2867 2875 return [], []
2868 2876 if os.path.isdir(spec):
2869 2877 spec += b'/'
2870 2878 spec = spec[len(rootdir) :]
2871 2879 fixpaths = pycompat.ossep != b'/'
2872 2880 if fixpaths:
2873 2881 spec = spec.replace(pycompat.ossep, b'/')
2874 2882 speclen = len(spec)
2875 2883 fullpaths = opts['full']
2876 2884 files, dirs = set(), set()
2877 2885 adddir, addfile = dirs.add, files.add
2878 2886 for f, st in dirstate.items():
2879 2887 if f.startswith(spec) and st.state in acceptable:
2880 2888 if fixpaths:
2881 2889 f = f.replace(b'/', pycompat.ossep)
2882 2890 if fullpaths:
2883 2891 addfile(f)
2884 2892 continue
2885 2893 s = f.find(pycompat.ossep, speclen)
2886 2894 if s >= 0:
2887 2895 adddir(f[:s])
2888 2896 else:
2889 2897 addfile(f)
2890 2898 return files, dirs
2891 2899
2892 2900 acceptable = b''
2893 2901 if opts['normal']:
2894 2902 acceptable += b'nm'
2895 2903 if opts['added']:
2896 2904 acceptable += b'a'
2897 2905 if opts['removed']:
2898 2906 acceptable += b'r'
2899 2907 cwd = repo.getcwd()
2900 2908 if not specs:
2901 2909 specs = [b'.']
2902 2910
2903 2911 files, dirs = set(), set()
2904 2912 for spec in specs:
2905 2913 f, d = complete(spec, acceptable or b'nmar')
2906 2914 files.update(f)
2907 2915 dirs.update(d)
2908 2916 files.update(dirs)
2909 2917 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2910 2918 ui.write(b'\n')
2911 2919
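# A minimal sketch of the "complete to the next path segment" behaviour of
# debugpathcomplete: paths matching the typed prefix are either returned in
# full (--full) or truncated at the next separator after the prefix.
def next_segment_completions(prefix, tracked, full=False):
    out = set()
    for path in tracked:
        if not path.startswith(prefix):
            continue
        if full:
            out.add(path)
            continue
        sep = path.find("/", len(prefix))
        out.add(path if sep < 0 else path[:sep])
    return sorted(out)

# next_segment_completions("src/", ["src/a/x.py", "src/b.py"])
# -> ["src/a", "src/b.py"]
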
2912 2920
2913 2921 @command(
2914 2922 b'debugpathcopies',
2915 2923 cmdutil.walkopts,
2916 2924 b'hg debugpathcopies REV1 REV2 [FILE]',
2917 2925 inferrepo=True,
2918 2926 )
2919 2927 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2920 2928 """show copies between two revisions"""
2921 2929 ctx1 = scmutil.revsingle(repo, rev1)
2922 2930 ctx2 = scmutil.revsingle(repo, rev2)
2923 2931 m = scmutil.match(ctx1, pats, opts)
2924 2932 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2925 2933 ui.write(b'%s -> %s\n' % (src, dst))
2926 2934
2927 2935
2928 2936 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2929 2937 def debugpeer(ui, path):
2930 2938 """establish a connection to a peer repository"""
2931 2939 # Always enable peer request logging. Requires --debug to display
2932 2940 # though.
2933 2941 overrides = {
2934 2942 (b'devel', b'debug.peer-request'): True,
2935 2943 }
2936 2944
2937 2945 with ui.configoverride(overrides):
2938 2946 peer = hg.peer(ui, {}, path)
2939 2947
2940 2948 try:
2941 2949 local = peer.local() is not None
2942 2950 canpush = peer.canpush()
2943 2951
2944 2952 ui.write(_(b'url: %s\n') % peer.url())
2945 2953 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2946 2954 ui.write(
2947 2955 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2948 2956 )
2949 2957 finally:
2950 2958 peer.close()
2951 2959
2952 2960
2953 2961 @command(
2954 2962 b'debugpickmergetool',
2955 2963 [
2956 2964 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2957 2965 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2958 2966 ]
2959 2967 + cmdutil.walkopts
2960 2968 + cmdutil.mergetoolopts,
2961 2969 _(b'[PATTERN]...'),
2962 2970 inferrepo=True,
2963 2971 )
2964 2972 def debugpickmergetool(ui, repo, *pats, **opts):
2965 2973 """examine which merge tool is chosen for specified file
2966 2974
2967 2975 As described in :hg:`help merge-tools`, Mercurial examines
2968 2976 configurations below in this order to decide which merge tool is
2969 2977 chosen for the specified file.
2970 2978
2971 2979 1. ``--tool`` option
2972 2980 2. ``HGMERGE`` environment variable
2973 2981 3. configurations in ``merge-patterns`` section
2974 2982 4. configuration of ``ui.merge``
2975 2983 5. configurations in ``merge-tools`` section
2976 2984 6. ``hgmerge`` tool (for historical reasons only)
2977 2985 7. default tool for fallback (``:merge`` or ``:prompt``)
2978 2986
2979 2987 This command writes out examination result in the style below::
2980 2988
2981 2989 FILE = MERGETOOL
2982 2990
2983 2991 By default, all files known in the first parent context of the
2984 2992 working directory are examined. Use file patterns and/or -I/-X
2985 2993 options to limit target files. -r/--rev is also useful to examine
2986 2994 files in another context without actually updating to it.
2987 2995
2988 2996 With --debug, this command shows warning messages while matching
2989 2997 against ``merge-patterns`` and so on, too. It is recommended to
2990 2998 use this option with explicit file patterns and/or -I/-X options,
2991 2999 because this option increases the amount of output per file according
2992 3000 to the configuration in hgrc.
2993 3001
2994 3002 With -v/--verbose, this command first shows the configurations below
2995 3003 (only those that are specified).
2996 3004
2997 3005 - ``--tool`` option
2998 3006 - ``HGMERGE`` environment variable
2999 3007 - configuration of ``ui.merge``
3000 3008
3001 3009 If a merge tool is chosen before matching against
3002 3010 ``merge-patterns``, this command can't show any helpful
3003 3011 information, even with --debug. In such a case, the information above
3004 3012 is useful for understanding why that merge tool was chosen.
3005 3013 """
3006 3014 opts = pycompat.byteskwargs(opts)
3007 3015 overrides = {}
3008 3016 if opts[b'tool']:
3009 3017 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
3010 3018 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
3011 3019
3012 3020 with ui.configoverride(overrides, b'debugmergepatterns'):
3013 3021 hgmerge = encoding.environ.get(b"HGMERGE")
3014 3022 if hgmerge is not None:
3015 3023 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
3016 3024 uimerge = ui.config(b"ui", b"merge")
3017 3025 if uimerge:
3018 3026 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
3019 3027
3020 3028 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3021 3029 m = scmutil.match(ctx, pats, opts)
3022 3030 changedelete = opts[b'changedelete']
3023 3031 for path in ctx.walk(m):
3024 3032 fctx = ctx[path]
3025 3033 with ui.silent(
3026 3034 error=True
3027 3035 ) if not ui.debugflag else util.nullcontextmanager():
3028 3036 tool, toolpath = filemerge._picktool(
3029 3037 repo,
3030 3038 ui,
3031 3039 path,
3032 3040 fctx.isbinary(),
3033 3041 b'l' in fctx.flags(),
3034 3042 changedelete,
3035 3043 )
3036 3044 ui.write(b'%s = %s\n' % (path, tool))
3037 3045
3038 3046
3039 3047 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
3040 3048 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
3041 3049 """access the pushkey key/value protocol
3042 3050
3043 3051 With two args, list the keys in the given namespace.
3044 3052
3045 3053 With five args, set a key to new if it currently is set to old.
3046 3054 Reports success or failure.
3047 3055 """
3048 3056
3049 3057 target = hg.peer(ui, {}, repopath)
3050 3058 try:
3051 3059 if keyinfo:
3052 3060 key, old, new = keyinfo
3053 3061 with target.commandexecutor() as e:
3054 3062 r = e.callcommand(
3055 3063 b'pushkey',
3056 3064 {
3057 3065 b'namespace': namespace,
3058 3066 b'key': key,
3059 3067 b'old': old,
3060 3068 b'new': new,
3061 3069 },
3062 3070 ).result()
3063 3071
3064 3072 ui.status(pycompat.bytestr(r) + b'\n')
3065 3073 return not r
3066 3074 else:
3067 3075 for k, v in sorted(target.listkeys(namespace).items()):
3068 3076 ui.write(
3069 3077 b"%s\t%s\n"
3070 3078 % (stringutil.escapestr(k), stringutil.escapestr(v))
3071 3079 )
3072 3080 finally:
3073 3081 target.close()
3074 3082
3075 3083
3076 3084 @command(b'debugpvec', [], _(b'A B'))
3077 3085 def debugpvec(ui, repo, a, b=None):
3078 3086 ca = scmutil.revsingle(repo, a)
3079 3087 cb = scmutil.revsingle(repo, b)
3080 3088 pa = pvec.ctxpvec(ca)
3081 3089 pb = pvec.ctxpvec(cb)
3082 3090 if pa == pb:
3083 3091 rel = b"="
3084 3092 elif pa > pb:
3085 3093 rel = b">"
3086 3094 elif pa < pb:
3087 3095 rel = b"<"
3088 3096 elif pa | pb:
3089 3097 rel = b"|"
3090 3098 ui.write(_(b"a: %s\n") % pa)
3091 3099 ui.write(_(b"b: %s\n") % pb)
3092 3100 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
3093 3101 ui.write(
3094 3102 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
3095 3103 % (
3096 3104 abs(pa._depth - pb._depth),
3097 3105 pvec._hamming(pa._vec, pb._vec),
3098 3106 pa.distance(pb),
3099 3107 rel,
3100 3108 )
3101 3109 )
3102 3110
3103 3111
3104 3112 @command(
3105 3113 b'debugrebuilddirstate|debugrebuildstate',
3106 3114 [
3107 3115 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
3108 3116 (
3109 3117 b'',
3110 3118 b'minimal',
3111 3119 None,
3112 3120 _(
3113 3121 b'only rebuild files that are inconsistent with '
3114 3122 b'the working copy parent'
3115 3123 ),
3116 3124 ),
3117 3125 ],
3118 3126 _(b'[-r REV]'),
3119 3127 )
3120 3128 def debugrebuilddirstate(ui, repo, rev, **opts):
3121 3129 """rebuild the dirstate as it would look like for the given revision
3122 3130
3123 3131 If no revision is specified, the first parent of the working directory will be used.
3124 3132
3125 3133 The dirstate will be set to the files of the given revision.
3126 3134 The actual working directory content or existing dirstate
3127 3135 information such as adds or removes is not considered.
3128 3136
3129 3137 ``minimal`` will only rebuild the dirstate status for files that claim to be
3130 3138 tracked but are not in the parent manifest, or that exist in the parent
3131 3139 manifest but are not in the dirstate. It will not change adds, removes, or
3132 3140 modified files that are in the working copy parent.
3133 3141
3134 3142 One use of this command is to make the next :hg:`status` invocation
3135 3143 check the actual file content.
3136 3144 """
3137 3145 ctx = scmutil.revsingle(repo, rev)
3138 3146 with repo.wlock():
3139 3147 if repo.currenttransaction() is not None:
3140 3148 msg = b'rebuild the dirstate outside of a transaction'
3141 3149 raise error.ProgrammingError(msg)
3142 3150 dirstate = repo.dirstate
3143 3151 changedfiles = None
3144 3152 # See command doc for what minimal does.
3145 3153 if opts.get('minimal'):
3146 3154 manifestfiles = set(ctx.manifest().keys())
3147 3155 dirstatefiles = set(dirstate)
3148 3156 manifestonly = manifestfiles - dirstatefiles
3149 3157 dsonly = dirstatefiles - manifestfiles
3150 3158 dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
3151 3159 changedfiles = manifestonly | dsnotadded
3152 3160
3153 3161 with dirstate.changing_parents(repo):
3154 3162 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3155 3163
3156 3164
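# A minimal sketch of the --minimal selection in debugrebuilddirstate: rebuild
# only files that are in the parent manifest but missing from the dirstate,
# plus dirstate-only files that are not marked as added.
def minimal_changed_files(manifest_files, dirstate_files, added_files):
    manifest_files = set(manifest_files)
    dirstate_files = set(dirstate_files)
    manifest_only = manifest_files - dirstate_files
    dirstate_only = dirstate_files - manifest_files
    not_added = {f for f in dirstate_only if f not in set(added_files)}
    return manifest_only | not_added
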
3157 3165 @command(
3158 3166 b'debugrebuildfncache',
3159 3167 [
3160 3168 (
3161 3169 b'',
3162 3170 b'only-data',
3163 3171 False,
3164 3172 _(b'only look for wrong .d files (much faster)'),
3165 3173 )
3166 3174 ],
3167 3175 b'',
3168 3176 )
3169 3177 def debugrebuildfncache(ui, repo, **opts):
3170 3178 """rebuild the fncache file"""
3171 3179 opts = pycompat.byteskwargs(opts)
3172 3180 repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
3173 3181
3174 3182
3175 3183 @command(
3176 3184 b'debugrename',
3177 3185 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
3178 3186 _(b'[-r REV] [FILE]...'),
3179 3187 )
3180 3188 def debugrename(ui, repo, *pats, **opts):
3181 3189 """dump rename information"""
3182 3190
3183 3191 opts = pycompat.byteskwargs(opts)
3184 3192 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
3185 3193 m = scmutil.match(ctx, pats, opts)
3186 3194 for abs in ctx.walk(m):
3187 3195 fctx = ctx[abs]
3188 3196 o = fctx.filelog().renamed(fctx.filenode())
3189 3197 rel = repo.pathto(abs)
3190 3198 if o:
3191 3199 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
3192 3200 else:
3193 3201 ui.write(_(b"%s not renamed\n") % rel)
3194 3202
3195 3203
3196 3204 @command(b'debugrequires|debugrequirements', [], b'')
3197 3205 def debugrequirements(ui, repo):
3198 3206 """print the current repo requirements"""
3199 3207 for r in sorted(repo.requirements):
3200 3208 ui.write(b"%s\n" % r)
3201 3209
3202 3210
3203 3211 @command(
3204 3212 b'debugrevlog',
3205 3213 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3206 3214 _(b'-c|-m|FILE'),
3207 3215 optionalrepo=True,
3208 3216 )
3209 3217 def debugrevlog(ui, repo, file_=None, **opts):
3210 3218 """show data and statistics about a revlog"""
3211 3219 opts = pycompat.byteskwargs(opts)
3212 3220 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3213 3221
3214 3222 if opts.get(b"dump"):
3215 3223 revlog_debug.dump(ui, r)
3216 3224 else:
3217 3225 revlog_debug.debug_revlog(ui, r)
3218 3226 return 0
3219 3227
3220 3228
3221 3229 @command(
3222 3230 b'debugrevlogindex',
3223 3231 cmdutil.debugrevlogopts
3224 3232 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3225 3233 _(b'[-f FORMAT] -c|-m|FILE'),
3226 3234 optionalrepo=True,
3227 3235 )
3228 3236 def debugrevlogindex(ui, repo, file_=None, **opts):
3229 3237 """dump the contents of a revlog index"""
3230 3238 opts = pycompat.byteskwargs(opts)
3231 3239 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3232 3240 format = opts.get(b'format', 0)
3233 3241 if format not in (0, 1):
3234 3242 raise error.Abort(_(b"unknown format %d") % format)
3235 3243
3236 3244 if ui.debugflag:
3237 3245 shortfn = hex
3238 3246 else:
3239 3247 shortfn = short
3240 3248
3241 3249 # There might not be anything in r, so have a sane default
3242 3250 idlen = 12
3243 3251 for i in r:
3244 3252 idlen = len(shortfn(r.node(i)))
3245 3253 break
3246 3254
3247 3255 if format == 0:
3248 3256 if ui.verbose:
3249 3257 ui.writenoi18n(
3250 3258 b" rev offset length linkrev %s %s p2\n"
3251 3259 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3252 3260 )
3253 3261 else:
3254 3262 ui.writenoi18n(
3255 3263 b" rev linkrev %s %s p2\n"
3256 3264 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3257 3265 )
3258 3266 elif format == 1:
3259 3267 if ui.verbose:
3260 3268 ui.writenoi18n(
3261 3269 (
3262 3270 b" rev flag offset length size link p1"
3263 3271 b" p2 %s\n"
3264 3272 )
3265 3273 % b"nodeid".rjust(idlen)
3266 3274 )
3267 3275 else:
3268 3276 ui.writenoi18n(
3269 3277 b" rev flag size link p1 p2 %s\n"
3270 3278 % b"nodeid".rjust(idlen)
3271 3279 )
3272 3280
3273 3281 for i in r:
3274 3282 node = r.node(i)
3275 3283 if format == 0:
3276 3284 try:
3277 3285 pp = r.parents(node)
3278 3286 except Exception:
3279 3287 pp = [repo.nullid, repo.nullid]
3280 3288 if ui.verbose:
3281 3289 ui.write(
3282 3290 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3283 3291 % (
3284 3292 i,
3285 3293 r.start(i),
3286 3294 r.length(i),
3287 3295 r.linkrev(i),
3288 3296 shortfn(node),
3289 3297 shortfn(pp[0]),
3290 3298 shortfn(pp[1]),
3291 3299 )
3292 3300 )
3293 3301 else:
3294 3302 ui.write(
3295 3303 b"% 6d % 7d %s %s %s\n"
3296 3304 % (
3297 3305 i,
3298 3306 r.linkrev(i),
3299 3307 shortfn(node),
3300 3308 shortfn(pp[0]),
3301 3309 shortfn(pp[1]),
3302 3310 )
3303 3311 )
3304 3312 elif format == 1:
3305 3313 pr = r.parentrevs(i)
3306 3314 if ui.verbose:
3307 3315 ui.write(
3308 3316 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3309 3317 % (
3310 3318 i,
3311 3319 r.flags(i),
3312 3320 r.start(i),
3313 3321 r.length(i),
3314 3322 r.rawsize(i),
3315 3323 r.linkrev(i),
3316 3324 pr[0],
3317 3325 pr[1],
3318 3326 shortfn(node),
3319 3327 )
3320 3328 )
3321 3329 else:
3322 3330 ui.write(
3323 3331 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3324 3332 % (
3325 3333 i,
3326 3334 r.flags(i),
3327 3335 r.rawsize(i),
3328 3336 r.linkrev(i),
3329 3337 pr[0],
3330 3338 pr[1],
3331 3339 shortfn(node),
3332 3340 )
3333 3341 )
3334 3342
3335 3343
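# Illustrative sketch: the per-revision index fields printed above can be
# read off any revlog object directly, e.g. the changelog of an already
# open repository. The helper name and the use of print() are assumptions;
# the accessors are the ones used by debugrevlogindex.
def _example_dump_index(repo):
    cl = repo.changelog
    for rev in cl:
        p1, p2 = cl.parentrevs(rev)
        print(rev, cl.flags(rev), cl.start(rev), cl.length(rev),
              cl.rawsize(rev), cl.linkrev(rev), p1, p2,
              cl.node(rev).hex())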
3336 3344 @command(
3337 3345 b'debugrevspec',
3338 3346 [
3339 3347 (
3340 3348 b'',
3341 3349 b'optimize',
3342 3350 None,
3343 3351 _(b'print parsed tree after optimizing (DEPRECATED)'),
3344 3352 ),
3345 3353 (
3346 3354 b'',
3347 3355 b'show-revs',
3348 3356 True,
3349 3357 _(b'print list of result revisions (default)'),
3350 3358 ),
3351 3359 (
3352 3360 b's',
3353 3361 b'show-set',
3354 3362 None,
3355 3363 _(b'print internal representation of result set'),
3356 3364 ),
3357 3365 (
3358 3366 b'p',
3359 3367 b'show-stage',
3360 3368 [],
3361 3369 _(b'print parsed tree at the given stage'),
3362 3370 _(b'NAME'),
3363 3371 ),
3364 3372 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3365 3373 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3366 3374 ],
3367 3375 b'REVSPEC',
3368 3376 )
3369 3377 def debugrevspec(ui, repo, expr, **opts):
3370 3378 """parse and apply a revision specification
3371 3379
3372 3380 Use -p/--show-stage option to print the parsed tree at the given stages.
3373 3381 Use -p all to print tree at every stage.
3374 3382
3375 3383 Use --no-show-revs option with -s or -p to print only the set
3376 3384 representation or the parsed tree respectively.
3377 3385
3378 3386 Use --verify-optimized to compare the optimized result with the unoptimized
3379 3387 one. Returns 1 if the optimized result differs.
3380 3388 """
3381 3389 opts = pycompat.byteskwargs(opts)
3382 3390 aliases = ui.configitems(b'revsetalias')
3383 3391 stages = [
3384 3392 (b'parsed', lambda tree: tree),
3385 3393 (
3386 3394 b'expanded',
3387 3395 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3388 3396 ),
3389 3397 (b'concatenated', revsetlang.foldconcat),
3390 3398 (b'analyzed', revsetlang.analyze),
3391 3399 (b'optimized', revsetlang.optimize),
3392 3400 ]
3393 3401 if opts[b'no_optimized']:
3394 3402 stages = stages[:-1]
3395 3403 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3396 3404 raise error.Abort(
3397 3405 _(b'cannot use --verify-optimized with --no-optimized')
3398 3406 )
3399 3407 stagenames = {n for n, f in stages}
3400 3408
3401 3409 showalways = set()
3402 3410 showchanged = set()
3403 3411 if ui.verbose and not opts[b'show_stage']:
3404 3412 # show parsed tree by --verbose (deprecated)
3405 3413 showalways.add(b'parsed')
3406 3414 showchanged.update([b'expanded', b'concatenated'])
3407 3415 if opts[b'optimize']:
3408 3416 showalways.add(b'optimized')
3409 3417 if opts[b'show_stage'] and opts[b'optimize']:
3410 3418 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3411 3419 if opts[b'show_stage'] == [b'all']:
3412 3420 showalways.update(stagenames)
3413 3421 else:
3414 3422 for n in opts[b'show_stage']:
3415 3423 if n not in stagenames:
3416 3424 raise error.Abort(_(b'invalid stage name: %s') % n)
3417 3425 showalways.update(opts[b'show_stage'])
3418 3426
3419 3427 treebystage = {}
3420 3428 printedtree = None
3421 3429 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3422 3430 for n, f in stages:
3423 3431 treebystage[n] = tree = f(tree)
3424 3432 if n in showalways or (n in showchanged and tree != printedtree):
3425 3433 if opts[b'show_stage'] or n != b'parsed':
3426 3434 ui.write(b"* %s:\n" % n)
3427 3435 ui.write(revsetlang.prettyformat(tree), b"\n")
3428 3436 printedtree = tree
3429 3437
3430 3438 if opts[b'verify_optimized']:
3431 3439 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3432 3440 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3433 3441 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3434 3442 ui.writenoi18n(
3435 3443 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3436 3444 )
3437 3445 ui.writenoi18n(
3438 3446 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3439 3447 )
3440 3448 arevs = list(arevs)
3441 3449 brevs = list(brevs)
3442 3450 if arevs == brevs:
3443 3451 return 0
3444 3452 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3445 3453 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3446 3454 sm = difflib.SequenceMatcher(None, arevs, brevs)
3447 3455 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3448 3456 if tag in ('delete', 'replace'):
3449 3457 for c in arevs[alo:ahi]:
3450 3458 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3451 3459 if tag in ('insert', 'replace'):
3452 3460 for c in brevs[blo:bhi]:
3453 3461 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3454 3462 if tag == 'equal':
3455 3463 for c in arevs[alo:ahi]:
3456 3464 ui.write(b' %d\n' % c)
3457 3465 return 1
3458 3466
3459 3467 func = revset.makematcher(tree)
3460 3468 revs = func(repo)
3461 3469 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3462 3470 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3463 3471 if not opts[b'show_revs']:
3464 3472 return
3465 3473 for c in revs:
3466 3474 ui.write(b"%d\n" % c)
3467 3475
3468 3476
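# Illustrative sketch: the parse/analyze/optimize pipeline walked by
# debugrevspec above can be driven directly. The expression and helper
# name are assumptions; the module entry points are the ones the command
# itself calls.
def _example_revspec_stages(repo, expr=b'heads(all())'):
    from mercurial import revset, revsetlang

    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    tree = revsetlang.analyze(tree)
    tree = revsetlang.optimize(tree)
    matcher = revset.makematcher(tree)
    return list(matcher(repo))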
3469 3477 @command(
3470 3478 b'debugserve',
3471 3479 [
3472 3480 (
3473 3481 b'',
3474 3482 b'sshstdio',
3475 3483 False,
3476 3484 _(b'run an SSH server bound to process handles'),
3477 3485 ),
3478 3486 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3479 3487 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3480 3488 ],
3481 3489 b'',
3482 3490 )
3483 3491 def debugserve(ui, repo, **opts):
3484 3492 """run a server with advanced settings
3485 3493
3486 3494 This command is similar to :hg:`serve`. It exists partially as a
3487 3495 workaround to the fact that ``hg serve --stdio`` must have specific
3488 3496 arguments for security reasons.
3489 3497 """
3490 3498 opts = pycompat.byteskwargs(opts)
3491 3499
3492 3500 if not opts[b'sshstdio']:
3493 3501 raise error.Abort(_(b'only --sshstdio is currently supported'))
3494 3502
3495 3503 logfh = None
3496 3504
3497 3505 if opts[b'logiofd'] and opts[b'logiofile']:
3498 3506 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3499 3507
3500 3508 if opts[b'logiofd']:
3501 3509 # Ideally we would be line buffered. But line buffering in binary
3502 3510 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3503 3511 # buffering could have performance impacts. But since this isn't
3504 3512 # performance critical code, it should be fine.
3505 3513 try:
3506 3514 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3507 3515 except OSError as e:
3508 3516 if e.errno != errno.ESPIPE:
3509 3517 raise
3510 3518 # can't seek a pipe, so `ab` mode fails on py3
3511 3519 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3512 3520 elif opts[b'logiofile']:
3513 3521 logfh = open(opts[b'logiofile'], b'ab', 0)
3514 3522
3515 3523 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3516 3524 s.serve_forever()
3517 3525
3518 3526
3519 3527 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3520 3528 def debugsetparents(ui, repo, rev1, rev2=None):
3521 3529 """manually set the parents of the current working directory (DANGEROUS)
3522 3530
3523 3531 This command is not what you are looking for and should not be used. Using
3524 3532 this command will most certainly result in slight corruption of the file
3525 3533 level histories within your repository. DO NOT USE THIS COMMAND.
3526 3534
3527 3535 The command updates the p1 and p2 fields in the dirstate without touching
3528 3536 anything else. This is useful for writing repository conversion tools, but
3529 3537 it should be used with extreme care. For example, neither the working
3530 3538 directory nor the dirstate is updated, so file status may be incorrect
3531 3539 after running this command. Only use it if you are one of the few people who
3532 3540 deeply understand both conversion tools and file-level histories. If you are
3533 3541 reading this help, you are not one of those people (most of them sailed west
3534 3542 from Mithlond anyway).
3535 3543
3536 3544 So one last time DO NOT USE THIS COMMAND.
3537 3545
3538 3546 Returns 0 on success.
3539 3547 """
3540 3548
3541 3549 node1 = scmutil.revsingle(repo, rev1).node()
3542 3550 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3543 3551
3544 3552 with repo.wlock():
3545 3553 repo.setparents(node1, node2)
3546 3554
3547 3555
3548 3556 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3549 3557 def debugsidedata(ui, repo, file_, rev=None, **opts):
3550 3558 """dump the side data for a cl/manifest/file revision
3551 3559
3552 3560 Use --verbose to dump the sidedata content."""
3553 3561 opts = pycompat.byteskwargs(opts)
3554 3562 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3555 3563 if rev is not None:
3556 3564 raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3557 3565 file_, rev = None, file_
3558 3566 elif rev is None:
3559 3567 raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3560 3568 r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
3561 3569 r = getattr(r, '_revlog', r)
3562 3570 try:
3563 3571 sidedata = r.sidedata(r.lookup(rev))
3564 3572 except KeyError:
3565 3573 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3566 3574 if sidedata:
3567 3575 sidedata = list(sidedata.items())
3568 3576 sidedata.sort()
3569 3577 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3570 3578 for key, value in sidedata:
3571 3579 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3572 3580 if ui.verbose:
3573 3581 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3574 3582
3575 3583
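# Illustrative sketch mirroring the calls above: list the side data entries
# attached to a changelog revision. The helper name and the b'tip' default
# are assumptions; the sidedata()/lookup() pattern is the one used by the
# command.
def _example_list_sidedata(repo, rev=b'tip'):
    rl = getattr(repo.changelog, '_revlog', repo.changelog)
    entries = rl.sidedata(rl.lookup(rev))
    return sorted((key, len(value)) for key, value in entries.items())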
3576 3584 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3577 3585 def debugssl(ui, repo, source=None, **opts):
3578 3586 """test a secure connection to a server
3579 3587
3580 3588 This builds the certificate chain for the server on Windows, installing the
3581 3589 missing intermediates and trusted root via Windows Update if necessary. It
3582 3590 does nothing on other platforms.
3583 3591
3584 3592 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3585 3593 that server is used. See :hg:`help urls` for more information.
3586 3594
3587 3595 If the update succeeds, retry the original operation. Otherwise, the cause
3588 3596 of the SSL error is likely another issue.
3589 3597 """
3590 3598 if not pycompat.iswindows:
3591 3599 raise error.Abort(
3592 3600 _(b'certificate chain building is only possible on Windows')
3593 3601 )
3594 3602
3595 3603 if not source:
3596 3604 if not repo:
3597 3605 raise error.Abort(
3598 3606 _(
3599 3607 b"there is no Mercurial repository here, and no "
3600 3608 b"server specified"
3601 3609 )
3602 3610 )
3603 3611 source = b"default"
3604 3612
3605 3613 path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
3606 3614 url = path.url
3607 3615
3608 3616 defaultport = {b'https': 443, b'ssh': 22}
3609 3617 if url.scheme in defaultport:
3610 3618 try:
3611 3619 addr = (url.host, int(url.port or defaultport[url.scheme]))
3612 3620 except ValueError:
3613 3621 raise error.Abort(_(b"malformed port number in URL"))
3614 3622 else:
3615 3623 raise error.Abort(_(b"only https and ssh connections are supported"))
3616 3624
3617 3625 from . import win32
3618 3626
3619 3627 s = ssl.wrap_socket(
3620 3628 socket.socket(),
3621 3629 ssl_version=ssl.PROTOCOL_TLS,
3622 3630 cert_reqs=ssl.CERT_NONE,
3623 3631 ca_certs=None,
3624 3632 )
3625 3633
3626 3634 try:
3627 3635 s.connect(addr)
3628 3636 cert = s.getpeercert(True)
3629 3637
3630 3638 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3631 3639
3632 3640 complete = win32.checkcertificatechain(cert, build=False)
3633 3641
3634 3642 if not complete:
3635 3643 ui.status(_(b'certificate chain is incomplete, updating... '))
3636 3644
3637 3645 if not win32.checkcertificatechain(cert):
3638 3646 ui.status(_(b'failed.\n'))
3639 3647 else:
3640 3648 ui.status(_(b'done.\n'))
3641 3649 else:
3642 3650 ui.status(_(b'full certificate chain is available\n'))
3643 3651 finally:
3644 3652 s.close()
3645 3653
3646 3654
3647 3655 @command(
3648 3656 b'debug::stable-tail-sort',
3649 3657 [
3650 3658 (
3651 3659 b'T',
3652 3660 b'template',
3653 3661 b'{rev}\n',
3654 3662 _(b'display with template'),
3655 3663 _(b'TEMPLATE'),
3656 3664 ),
3657 3665 ],
3658 3666 b'REV',
3659 3667 )
3660 3668 def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
3661 3669 """display the stable-tail sort of the ancestors of a given node"""
3662 3670 rev = logcmdutil.revsingle(repo, revspec).rev()
3663 3671 cl = repo.changelog
3664 3672
3665 3673 displayer = logcmdutil.maketemplater(ui, repo, template)
3666 3674 sorted_revs = stabletailsort._stable_tail_sort(cl, rev)
3667 3675 for ancestor_rev in sorted_revs:
3668 3676 displayer.show(repo[ancestor_rev])
3669 3677
3670 3678
3671 3679 @command(
3672 3680 b"debugbackupbundle",
3673 3681 [
3674 3682 (
3675 3683 b"",
3676 3684 b"recover",
3677 3685 b"",
3678 3686 b"brings the specified changeset back into the repository",
3679 3687 )
3680 3688 ]
3681 3689 + cmdutil.logopts,
3682 3690 _(b"hg debugbackupbundle [--recover HASH]"),
3683 3691 )
3684 3692 def debugbackupbundle(ui, repo, *pats, **opts):
3685 3693 """lists the changesets available in backup bundles
3686 3694
3687 3695 Without any arguments, this command prints a list of the changesets in each
3688 3696 backup bundle.
3689 3697
3690 3698 --recover takes a changeset hash and unbundles the first bundle that
3691 3699 contains that hash, which puts that changeset back in your repository.
3692 3700
3693 3701 --verbose will print the entire commit message and the bundle path for that
3694 3702 backup.
3695 3703 """
3696 3704 backups = list(
3697 3705 filter(
3698 3706 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3699 3707 )
3700 3708 )
3701 3709 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3702 3710
3703 3711 opts = pycompat.byteskwargs(opts)
3704 3712 opts[b"bundle"] = b""
3705 3713 opts[b"force"] = None
3706 3714 limit = logcmdutil.getlimit(opts)
3707 3715
3708 3716 def display(other, chlist, displayer):
3709 3717 if opts.get(b"newest_first"):
3710 3718 chlist.reverse()
3711 3719 count = 0
3712 3720 for n in chlist:
3713 3721 if limit is not None and count >= limit:
3714 3722 break
3715 3723 parents = [
3716 3724 True for p in other.changelog.parents(n) if p != repo.nullid
3717 3725 ]
3718 3726 if opts.get(b"no_merges") and len(parents) == 2:
3719 3727 continue
3720 3728 count += 1
3721 3729 displayer.show(other[n])
3722 3730
3723 3731 recovernode = opts.get(b"recover")
3724 3732 if recovernode:
3725 3733 if scmutil.isrevsymbol(repo, recovernode):
3726 3734 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3727 3735 return
3728 3736 elif backups:
3729 3737 msg = _(
3730 3738 b"Recover changesets using: hg debugbackupbundle --recover "
3731 3739 b"<changeset hash>\n\nAvailable backup changesets:"
3732 3740 )
3733 3741 ui.status(msg, label=b"status.removed")
3734 3742 else:
3735 3743 ui.status(_(b"no backup changesets found\n"))
3736 3744 return
3737 3745
3738 3746 for backup in backups:
3739 3747 # Much of this is copied from the hg incoming logic
3740 3748 source = os.path.relpath(backup, encoding.getcwd())
3741 3749 path = urlutil.get_unique_pull_path_obj(
3742 3750 b'debugbackupbundle',
3743 3751 ui,
3744 3752 source,
3745 3753 )
3746 3754 try:
3747 3755 other = hg.peer(repo, opts, path)
3748 3756 except error.LookupError as ex:
3749 3757 msg = _(b"\nwarning: unable to open bundle %s") % path.loc
3750 3758 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3751 3759 ui.warn(msg, hint=hint)
3752 3760 continue
3753 3761 branches = (path.branch, opts.get(b'branch', []))
3754 3762 revs, checkout = hg.addbranchrevs(
3755 3763 repo, other, branches, opts.get(b"rev")
3756 3764 )
3757 3765
3758 3766 if revs:
3759 3767 revs = [other.lookup(rev) for rev in revs]
3760 3768
3761 3769 with ui.silent():
3762 3770 try:
3763 3771 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3764 3772 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3765 3773 )
3766 3774 except error.LookupError:
3767 3775 continue
3768 3776
3769 3777 try:
3770 3778 if not chlist:
3771 3779 continue
3772 3780 if recovernode:
3773 3781 with repo.lock(), repo.transaction(b"unbundle") as tr:
3774 3782 if scmutil.isrevsymbol(other, recovernode):
3775 3783 ui.status(_(b"Unbundling %s\n") % (recovernode))
3776 3784 f = hg.openpath(ui, path.loc)
3777 3785 gen = exchange.readbundle(ui, f, path.loc)
3778 3786 if isinstance(gen, bundle2.unbundle20):
3779 3787 bundle2.applybundle(
3780 3788 repo,
3781 3789 gen,
3782 3790 tr,
3783 3791 source=b"unbundle",
3784 3792 url=b"bundle:" + path.loc,
3785 3793 )
3786 3794 else:
3787 3795 gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
3788 3796 break
3789 3797 else:
3790 3798 backupdate = encoding.strtolocal(
3791 3799 time.strftime(
3792 3800 "%a %H:%M, %Y-%m-%d",
3793 3801 time.localtime(os.path.getmtime(path.loc)),
3794 3802 )
3795 3803 )
3796 3804 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3797 3805 if ui.verbose:
3798 3806 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
3799 3807 else:
3800 3808 opts[
3801 3809 b"template"
3802 3810 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3803 3811 displayer = logcmdutil.changesetdisplayer(
3804 3812 ui, other, opts, False
3805 3813 )
3806 3814 display(other, chlist, displayer)
3807 3815 displayer.close()
3808 3816 finally:
3809 3817 cleanupfn()
3810 3818
3811 3819
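# Illustrative sketch: the bundle discovery performed above reduces to a
# glob over .hg/strip-backup sorted by modification time, newest first.
# The helper name is an assumption.
def _example_list_backup_bundles(repo):
    import glob
    import os

    pattern = repo.vfs.join(b"strip-backup") + b"/*.hg"
    backups = [p for p in glob.glob(pattern) if os.path.isfile(p)]
    backups.sort(key=os.path.getmtime, reverse=True)
    return backups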
3812 3820 @command(
3813 3821 b'debugsub',
3814 3822 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3815 3823 _(b'[-r REV] [REV]'),
3816 3824 )
3817 3825 def debugsub(ui, repo, rev=None):
3818 3826 ctx = scmutil.revsingle(repo, rev, None)
3819 3827 for k, v in sorted(ctx.substate.items()):
3820 3828 ui.writenoi18n(b'path %s\n' % k)
3821 3829 ui.writenoi18n(b' source %s\n' % v[0])
3822 3830 ui.writenoi18n(b' revision %s\n' % v[1])
3823 3831
3824 3832
3825 3833 @command(
3826 3834 b'debugshell',
3827 3835 [
3828 3836 (
3829 3837 b'c',
3830 3838 b'command',
3831 3839 b'',
3832 3840 _(b'program passed in as a string'),
3833 3841 _(b'COMMAND'),
3834 3842 )
3835 3843 ],
3836 3844 _(b'[-c COMMAND]'),
3837 3845 optionalrepo=True,
3838 3846 )
3839 3847 def debugshell(ui, repo, **opts):
3840 3848 """run an interactive Python interpreter
3841 3849
3842 3850 The local namespace is provided with a reference to the ui and
3843 3851 the repo instance (if available).
3844 3852 """
3845 3853 import code
3846 3854
3847 3855 imported_objects = {
3848 3856 'ui': ui,
3849 3857 'repo': repo,
3850 3858 }
3851 3859
3852 3860 # py2exe disables initialization of the site module, which is responsible
3853 3861 # for arranging for ``quit()`` to exit the interpreter. Manually initialize
3854 3862 # the stuff that site normally does here, so that the interpreter can be
3855 3863 # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
3856 3864 # py.exe, or py2exe.
3857 3865 if getattr(sys, "frozen", None) == 'console_exe':
3858 3866 try:
3859 3867 import site
3860 3868
3861 3869 site.setcopyright()
3862 3870 site.sethelper()
3863 3871 site.setquit()
3864 3872 except ImportError:
3865 3873 site = None # Keep PyCharm happy
3866 3874
3867 3875 command = opts.get('command')
3868 3876 if command:
3869 3877 compiled = code.compile_command(encoding.strfromlocal(command))
3870 3878 code.InteractiveInterpreter(locals=imported_objects).runcode(compiled)
3871 3879 return
3872 3880
3873 3881 code.interact(local=imported_objects)
3874 3882
3875 3883
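# Illustrative sketch: the --command path above compiles the given string
# with the stdlib ``code`` module and runs it against a small namespace.
# The sample snippet and helper name are assumptions.
def _example_run_snippet(ui, repo, source="print(len(repo))"):
    import code

    interp = code.InteractiveInterpreter(locals={'ui': ui, 'repo': repo})
    compiled = code.compile_command(source)
    if compiled is not None:  # None means the source is incomplete
        interp.runcode(compiled)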
3876 3884 @command(
3877 3885 b'debug-revlog-stats',
3878 3886 [
3879 3887 (b'c', b'changelog', None, _(b'Display changelog statistics')),
3880 3888 (b'm', b'manifest', None, _(b'Display manifest statistics')),
3881 3889 (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
3882 3890 ]
3883 3891 + cmdutil.formatteropts,
3884 3892 )
3885 3893 def debug_revlog_stats(ui, repo, **opts):
3886 3894 """display statistics about revlogs in the store"""
3887 3895 opts = pycompat.byteskwargs(opts)
3888 3896 changelog = opts[b"changelog"]
3889 3897 manifest = opts[b"manifest"]
3890 3898 filelogs = opts[b"filelogs"]
3891 3899
3892 3900 if changelog is None and manifest is None and filelogs is None:
3893 3901 changelog = True
3894 3902 manifest = True
3895 3903 filelogs = True
3896 3904
3897 3905 repo = repo.unfiltered()
3898 3906 fm = ui.formatter(b'debug-revlog-stats', opts)
3899 3907 revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
3900 3908 fm.end()
3901 3909
3902 3910
3903 3911 @command(
3904 3912 b'debugsuccessorssets',
3905 3913 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3906 3914 _(b'[REV]'),
3907 3915 )
3908 3916 def debugsuccessorssets(ui, repo, *revs, **opts):
3909 3917 """show set of successors for revision
3910 3918
3911 3919 A successors set of changeset A is a consistent group of revisions that
3912 3920 succeed A. It contains only non-obsolete changesets unless the closest
3913 3921 successors sets option is set.
3914 3922
3915 3923 In most cases a changeset A has a single successors set containing a single
3916 3924 successor (changeset A replaced by A').
3917 3925
3918 3926 A changeset that is made obsolete with no successors is called "pruned".
3919 3927 Such changesets have no successors sets at all.
3920 3928
3921 3929 A changeset that has been "split" will have a successors set containing
3922 3930 more than one successor.
3923 3931
3924 3932 A changeset that has been rewritten in multiple different ways is called
3925 3933 "divergent". Such changesets have multiple successor sets (each of which
3926 3934 may also be split, i.e. have multiple successors).
3927 3935
3928 3936 Results are displayed as follows::
3929 3937
3930 3938 <rev1>
3931 3939 <successors-1A>
3932 3940 <rev2>
3933 3941 <successors-2A>
3934 3942 <successors-2B1> <successors-2B2> <successors-2B3>
3935 3943
3936 3944 Here rev2 has two possible (i.e. divergent) successors sets. The first
3937 3945 holds one element, whereas the second holds three (i.e. the changeset has
3938 3946 been split).
3939 3947 """
3940 3948 # passed to successorssets caching computation from one call to another
3941 3949 cache = {}
3942 3950 ctx2str = bytes
3943 3951 node2str = short
3944 3952 for rev in logcmdutil.revrange(repo, revs):
3945 3953 ctx = repo[rev]
3946 3954 ui.write(b'%s\n' % ctx2str(ctx))
3947 3955 for succsset in obsutil.successorssets(
3948 3956 repo, ctx.node(), closest=opts['closest'], cache=cache
3949 3957 ):
3950 3958 if succsset:
3951 3959 ui.write(b' ')
3952 3960 ui.write(node2str(succsset[0]))
3953 3961 for node in succsset[1:]:
3954 3962 ui.write(b' ')
3955 3963 ui.write(node2str(node))
3956 3964 ui.write(b'\n')
3957 3965
3958 3966
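# Illustrative sketch: successors sets can be computed with the same
# obsutil entry point the command uses; an empty result means the
# changeset was pruned. The helper name and default arguments are
# assumptions.
def _example_successors(repo, rev, closest=False):
    from mercurial import obsutil

    cache = {}  # reusable across calls, as in the command above
    node = repo[rev].node()
    return obsutil.successorssets(repo, node, closest=closest, cache=cache)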
3959 3967 @command(b'debugtagscache', [])
3960 3968 def debugtagscache(ui, repo):
3961 3969 """display the contents of .hg/cache/hgtagsfnodes1"""
3962 3970 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3963 3971 flog = repo.file(b'.hgtags')
3964 3972 for r in repo:
3965 3973 node = repo[r].node()
3966 3974 tagsnode = cache.getfnode(node, computemissing=False)
3967 3975 if tagsnode:
3968 3976 tagsnodedisplay = hex(tagsnode)
3969 3977 if not flog.hasnode(tagsnode):
3970 3978 tagsnodedisplay += b' (unknown node)'
3971 3979 elif tagsnode is None:
3972 3980 tagsnodedisplay = b'missing'
3973 3981 else:
3974 3982 tagsnodedisplay = b'invalid'
3975 3983
3976 3984 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3977 3985
3978 3986
3979 3987 @command(
3980 3988 b'debugtemplate',
3981 3989 [
3982 3990 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3983 3991 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3984 3992 ],
3985 3993 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3986 3994 optionalrepo=True,
3987 3995 )
3988 3996 def debugtemplate(ui, repo, tmpl, **opts):
3989 3997 """parse and apply a template
3990 3998
3991 3999 If -r/--rev is given, the template is processed as a log template and
3992 4000 applied to the given changesets. Otherwise, it is processed as a generic
3993 4001 template.
3994 4002
3995 4003 Use --verbose to print the parsed tree.
3996 4004 """
3997 4005 revs = None
3998 4006 if opts['rev']:
3999 4007 if repo is None:
4000 4008 raise error.RepoError(
4001 4009 _(b'there is no Mercurial repository here (.hg not found)')
4002 4010 )
4003 4011 revs = logcmdutil.revrange(repo, opts['rev'])
4004 4012
4005 4013 props = {}
4006 4014 for d in opts['define']:
4007 4015 try:
4008 4016 k, v = (e.strip() for e in d.split(b'=', 1))
4009 4017 if not k or k == b'ui':
4010 4018 raise ValueError
4011 4019 props[k] = v
4012 4020 except ValueError:
4013 4021 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4014 4022
4015 4023 if ui.verbose:
4016 4024 aliases = ui.configitems(b'templatealias')
4017 4025 tree = templater.parse(tmpl)
4018 4026 ui.note(templater.prettyformat(tree), b'\n')
4019 4027 newtree = templater.expandaliases(tree, aliases)
4020 4028 if newtree != tree:
4021 4029 ui.notenoi18n(
4022 4030 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4023 4031 )
4024 4032
4025 4033 if revs is None:
4026 4034 tres = formatter.templateresources(ui, repo)
4027 4035 t = formatter.maketemplater(ui, tmpl, resources=tres)
4028 4036 if ui.verbose:
4029 4037 kwds, funcs = t.symbolsuseddefault()
4030 4038 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4031 4039 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4032 4040 ui.write(t.renderdefault(props))
4033 4041 else:
4034 4042 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4035 4043 if ui.verbose:
4036 4044 kwds, funcs = displayer.t.symbolsuseddefault()
4037 4045 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4038 4046 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4039 4047 for r in revs:
4040 4048 displayer.show(repo[r], **pycompat.strkwargs(props))
4041 4049 displayer.close()
4042 4050
4043 4051
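# Illustrative sketch: a generic (non-log) template can be rendered through
# the same formatter helpers used above. The template text, the extra
# keyword, and the helper name are assumptions.
def _example_render_template(ui, repo, tmpl=b'{word} world\n'):
    from mercurial import formatter

    tres = formatter.templateresources(ui, repo)
    t = formatter.maketemplater(ui, tmpl, resources=tres)
    return t.renderdefault({b'word': b'hello'})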
4044 4052 @command(
4045 4053 b'debuguigetpass',
4046 4054 [
4047 4055 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4048 4056 ],
4049 4057 _(b'[-p TEXT]'),
4050 4058 norepo=True,
4051 4059 )
4052 4060 def debuguigetpass(ui, prompt=b''):
4053 4061 """show prompt to type password"""
4054 4062 r = ui.getpass(prompt)
4055 4063 if r is None:
4056 4064 r = b"<default response>"
4057 4065 ui.writenoi18n(b'response: %s\n' % r)
4058 4066
4059 4067
4060 4068 @command(
4061 4069 b'debuguiprompt',
4062 4070 [
4063 4071 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4064 4072 ],
4065 4073 _(b'[-p TEXT]'),
4066 4074 norepo=True,
4067 4075 )
4068 4076 def debuguiprompt(ui, prompt=b''):
4069 4077 """show plain prompt"""
4070 4078 r = ui.prompt(prompt)
4071 4079 ui.writenoi18n(b'response: %s\n' % r)
4072 4080
4073 4081
4074 4082 @command(b'debugupdatecaches', [])
4075 4083 def debugupdatecaches(ui, repo, *pats, **opts):
4076 4084 """warm all known caches in the repository"""
4077 4085 with repo.wlock(), repo.lock():
4078 4086 repo.updatecaches(caches=repository.CACHES_ALL)
4079 4087
4080 4088
4081 4089 @command(
4082 4090 b'debugupgraderepo',
4083 4091 [
4084 4092 (
4085 4093 b'o',
4086 4094 b'optimize',
4087 4095 [],
4088 4096 _(b'extra optimization to perform'),
4089 4097 _(b'NAME'),
4090 4098 ),
4091 4099 (b'', b'run', False, _(b'performs an upgrade')),
4092 4100 (b'', b'backup', True, _(b'keep the old repository content around')),
4093 4101 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4094 4102 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4095 4103 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4096 4104 ],
4097 4105 )
4098 4106 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4099 4107 """upgrade a repository to use different features
4100 4108
4101 4109 If no arguments are specified, the repository is evaluated for upgrade
4102 4110 and a list of problems and potential optimizations is printed.
4103 4111
4104 4112 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4105 4113 can be influenced via additional arguments. More details will be provided
4106 4114 by the command output when run without ``--run``.
4107 4115
4108 4116 During the upgrade, the repository will be locked and no writes will be
4109 4117 allowed.
4110 4118
4111 4119 At the end of the upgrade, the repository may not be readable while new
4112 4120 repository data is swapped in. This window will be as long as it takes to
4113 4121 rename some directories inside the ``.hg`` directory. On most machines, this
4114 4122 should complete almost instantaneously and the chances of a consumer being
4115 4123 unable to access the repository should be low.
4116 4124
4117 4125 By default, all revlogs will be upgraded. You can restrict this using flags
4118 4126 such as `--manifest`:
4119 4127
4120 4128 * `--manifest`: only optimize the manifest
4121 4129 * `--no-manifest`: optimize all revlogs but the manifest
4122 4130 * `--changelog`: optimize the changelog only
4123 4131 * `--no-changelog --no-manifest`: optimize filelogs only
4124 4132 * `--filelogs`: optimize the filelogs only
4125 4133 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4126 4134 """
4127 4135 return upgrade.upgraderepo(
4128 4136 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4129 4137 )
4130 4138
4131 4139
4132 4140 @command(
4133 4141 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4134 4142 )
4135 4143 def debugwalk(ui, repo, *pats, **opts):
4136 4144 """show how files match on given patterns"""
4137 4145 opts = pycompat.byteskwargs(opts)
4138 4146 m = scmutil.match(repo[None], pats, opts)
4139 4147 if ui.verbose:
4140 4148 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4141 4149 items = list(repo[None].walk(m))
4142 4150 if not items:
4143 4151 return
4144 4152 f = lambda fn: fn
4145 4153 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4146 4154 f = lambda fn: util.normpath(fn)
4147 4155 fmt = b'f %%-%ds %%-%ds %%s' % (
4148 4156 max([len(abs) for abs in items]),
4149 4157 max([len(repo.pathto(abs)) for abs in items]),
4150 4158 )
4151 4159 for abs in items:
4152 4160 line = fmt % (
4153 4161 abs,
4154 4162 f(repo.pathto(abs)),
4155 4163 m.exact(abs) and b'exact' or b'',
4156 4164 )
4157 4165 ui.write(b"%s\n" % line.rstrip())
4158 4166
4159 4167
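# Illustrative sketch: the matcher plumbing exercised above in its simplest
# form. The glob pattern and helper name are assumptions; the scmutil.match
# and walk() calls are the ones used by this command.
def _example_match(repo, patterns=(b'glob:**.py',)):
    from mercurial import scmutil

    m = scmutil.match(repo[None], list(patterns), {})
    return [(f, m.exact(f)) for f in repo[None].walk(m)]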
4160 4168 @command(b'debugwhyunstable', [], _(b'REV'))
4161 4169 def debugwhyunstable(ui, repo, rev):
4162 4170 """explain instabilities of a changeset"""
4163 4171 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4164 4172 dnodes = b''
4165 4173 if entry.get(b'divergentnodes'):
4166 4174 dnodes = (
4167 4175 b' '.join(
4168 4176 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4169 4177 for ctx in entry[b'divergentnodes']
4170 4178 )
4171 4179 + b' '
4172 4180 )
4173 4181 ui.write(
4174 4182 b'%s: %s%s %s\n'
4175 4183 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4176 4184 )
4177 4185
4178 4186
4179 4187 @command(
4180 4188 b'debugwireargs',
4181 4189 [
4182 4190 (b'', b'three', b'', b'three'),
4183 4191 (b'', b'four', b'', b'four'),
4184 4192 (b'', b'five', b'', b'five'),
4185 4193 ]
4186 4194 + cmdutil.remoteopts,
4187 4195 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4188 4196 norepo=True,
4189 4197 )
4190 4198 def debugwireargs(ui, repopath, *vals, **opts):
4191 4199 opts = pycompat.byteskwargs(opts)
4192 4200 repo = hg.peer(ui, opts, repopath)
4193 4201 try:
4194 4202 for opt in cmdutil.remoteopts:
4195 4203 del opts[opt[1]]
4196 4204 args = {}
4197 4205 for k, v in opts.items():
4198 4206 if v:
4199 4207 args[k] = v
4200 4208 args = pycompat.strkwargs(args)
4201 4209 # run twice to check that we don't mess up the stream for the next command
4202 4210 res1 = repo.debugwireargs(*vals, **args)
4203 4211 res2 = repo.debugwireargs(*vals, **args)
4204 4212 ui.write(b"%s\n" % res1)
4205 4213 if res1 != res2:
4206 4214 ui.warn(b"%s\n" % res2)
4207 4215 finally:
4208 4216 repo.close()
4209 4217
4210 4218
4211 4219 def _parsewirelangblocks(fh):
4212 4220 activeaction = None
4213 4221 blocklines = []
4214 4222 lastindent = 0
4215 4223
4216 4224 for line in fh:
4217 4225 line = line.rstrip()
4218 4226 if not line:
4219 4227 continue
4220 4228
4221 4229 if line.startswith(b'#'):
4222 4230 continue
4223 4231
4224 4232 if not line.startswith(b' '):
4225 4233 # New block. Flush previous one.
4226 4234 if activeaction:
4227 4235 yield activeaction, blocklines
4228 4236
4229 4237 activeaction = line
4230 4238 blocklines = []
4231 4239 lastindent = 0
4232 4240 continue
4233 4241
4234 4242 # Else we start with an indent.
4235 4243
4236 4244 if not activeaction:
4237 4245 raise error.Abort(_(b'indented line outside of block'))
4238 4246
4239 4247 indent = len(line) - len(line.lstrip())
4240 4248
4241 4249 # If this line is indented more than the last line, concatenate it.
4242 4250 if indent > lastindent and blocklines:
4243 4251 blocklines[-1] += line.lstrip()
4244 4252 else:
4245 4253 blocklines.append(line)
4246 4254 lastindent = indent
4247 4255
4248 4256 # Flush last block.
4249 4257 if activeaction:
4250 4258 yield activeaction, blocklines
4251 4259
4252 4260
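# Illustrative sketch: feeding a small script through the parser above
# yields (action, lines) pairs. The sample script and helper name are
# assumptions.
def _example_parse_blocks():
    import io

    script = io.BytesIO(
        b'# comment lines are skipped\n'
        b'command listkeys\n'
        b'    namespace bookmarks\n'
        b'raw+\n'
        b'    hello\n'
    )
    return list(_parsewirelangblocks(script))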
4253 4261 @command(
4254 4262 b'debugwireproto',
4255 4263 [
4256 4264 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4257 4265 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4258 4266 (
4259 4267 b'',
4260 4268 b'noreadstderr',
4261 4269 False,
4262 4270 _(b'do not read from stderr of the remote'),
4263 4271 ),
4264 4272 (
4265 4273 b'',
4266 4274 b'nologhandshake',
4267 4275 False,
4268 4276 _(b'do not log I/O related to the peer handshake'),
4269 4277 ),
4270 4278 ]
4271 4279 + cmdutil.remoteopts,
4272 4280 _(b'[PATH]'),
4273 4281 optionalrepo=True,
4274 4282 )
4275 4283 def debugwireproto(ui, repo, path=None, **opts):
4276 4284 """send wire protocol commands to a server
4277 4285
4278 4286 This command can be used to issue wire protocol commands to remote
4279 4287 peers and to debug the raw data being exchanged.
4280 4288
4281 4289 ``--localssh`` will start an SSH server against the current repository
4282 4290 and connect to that. By default, the connection will perform a handshake
4283 4291 and establish an appropriate peer instance.
4284 4292
4285 4293 ``--peer`` can be used to bypass the handshake protocol and construct a
4286 4294 peer instance using the specified class type. Valid values are ``raw``,
4287 4295 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4288 4296 don't support higher-level command actions.
4289 4297
4290 4298 ``--noreadstderr`` can be used to disable automatic reading from stderr
4291 4299 of the peer (for SSH connections only). Disabling automatic reading of
4292 4300 stderr is useful for making output more deterministic.
4293 4301
4294 4302 Commands are issued via a mini language which is specified via stdin.
4295 4303 The language consists of individual actions to perform. An action is
4296 4304 defined by a block. A block is defined as a line with no leading
4297 4305 space followed by 0 or more lines with leading space. Blocks are
4298 4306 effectively a high-level command with additional metadata.
4299 4307
4300 4308 Lines beginning with ``#`` are ignored.
4301 4309
4302 4310 The following sections denote available actions.
4303 4311
4304 4312 raw
4305 4313 ---
4306 4314
4307 4315 Send raw data to the server.
4308 4316
4309 4317 The block payload contains the raw data to send as one atomic send
4310 4318 operation. The data may not actually be delivered in a single system
4311 4319 call: it depends on the abilities of the transport being used.
4312 4320
4313 4321 Each line in the block is de-indented and concatenated. Then, that
4314 4322 value is evaluated as a Python b'' literal. This allows the use of
4315 4323 backslash escaping, etc.
4316 4324
4317 4325 raw+
4318 4326 ----
4319 4327
4320 4328 Behaves like ``raw`` except flushes output afterwards.
4321 4329
4322 4330 command <X>
4323 4331 -----------
4324 4332
4325 4333 Send a request to run a named command, whose name follows the ``command``
4326 4334 string.
4327 4335
4328 4336 Arguments to the command are defined as lines in this block. The format of
4329 4337 each line is ``<key> <value>``. e.g.::
4330 4338
4331 4339 command listkeys
4332 4340 namespace bookmarks
4333 4341
4334 4342 If the value begins with ``eval:``, it will be interpreted as a Python
4335 4343 literal expression. Otherwise values are interpreted as Python b'' literals.
4336 4344 This allows sending complex types and encoding special byte sequences via
4337 4345 backslash escaping.
4338 4346
4339 4347 The following arguments have special meaning:
4340 4348
4341 4349 ``PUSHFILE``
4342 4350 When defined, the *push* mechanism of the peer will be used instead
4343 4351 of the static request-response mechanism and the content of the
4344 4352 file specified in the value of this argument will be sent as the
4345 4353 command payload.
4346 4354
4347 4355 This can be used to submit a local bundle file to the remote.
4348 4356
4349 4357 batchbegin
4350 4358 ----------
4351 4359
4352 4360 Instruct the peer to begin a batched send.
4353 4361
4354 4362 All ``command`` blocks are queued for execution until the next
4355 4363 ``batchsubmit`` block.
4356 4364
4357 4365 batchsubmit
4358 4366 -----------
4359 4367
4360 4368 Submit previously queued ``command`` blocks as a batch request.
4361 4369
4362 4370 This action MUST be paired with a ``batchbegin`` action.
4363 4371
4364 4372 httprequest <method> <path>
4365 4373 ---------------------------
4366 4374
4367 4375 (HTTP peer only)
4368 4376
4369 4377 Send an HTTP request to the peer.
4370 4378
4371 4379 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4372 4380
4373 4381 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4374 4382 headers to add to the request. e.g. ``Accept: foo``.
4375 4383
4376 4384 The following arguments are special:
4377 4385
4378 4386 ``BODYFILE``
4379 4387 The content of the file defined as the value to this argument will be
4380 4388 transferred verbatim as the HTTP request body.
4381 4389
4382 4390 ``frame <type> <flags> <payload>``
4383 4391 Send a unified protocol frame as part of the request body.
4384 4392
4385 4393 All frames will be collected and sent as the body to the HTTP
4386 4394 request.
4387 4395
4388 4396 close
4389 4397 -----
4390 4398
4391 4399 Close the connection to the server.
4392 4400
4393 4401 flush
4394 4402 -----
4395 4403
4396 4404 Flush data written to the server.
4397 4405
4398 4406 readavailable
4399 4407 -------------
4400 4408
4401 4409 Close the write end of the connection and read all available data from
4402 4410 the server.
4403 4411
4404 4412 If the connection to the server encompasses multiple pipes, we poll both
4405 4413 pipes and read available data.
4406 4414
4407 4415 readline
4408 4416 --------
4409 4417
4410 4418 Read a line of output from the server. If there are multiple output
4411 4419 pipes, reads only the main pipe.
4412 4420
4413 4421 ereadline
4414 4422 ---------
4415 4423
4416 4424 Like ``readline``, but read from the stderr pipe, if available.
4417 4425
4418 4426 read <X>
4419 4427 --------
4420 4428
4421 4429 ``read()`` N bytes from the server's main output pipe.
4422 4430
4423 4431 eread <X>
4424 4432 ---------
4425 4433
4426 4434 ``read()`` N bytes from the server's stderr pipe, if available.
4427 4435
4428 4436 Specifying Unified Frame-Based Protocol Frames
4429 4437 ----------------------------------------------
4430 4438
4431 4439 It is possible to emit *Unified Frame-Based Protocol* frames by using
4432 4440 special syntax.
4433 4441
4434 4442 A frame is composed as a type, flags, and payload. These can be parsed
4435 4443 from a string of the form:
4436 4444
4437 4445 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4438 4446
4439 4447 ``request-id`` and ``stream-id`` are integers defining the request and
4440 4448 stream identifiers.
4441 4449
4442 4450 ``type`` can be an integer value for the frame type or the string name
4443 4451 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4444 4452 ``command-name``.
4445 4453
4446 4454 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4447 4455 components. Each component (and there can be just one) can be an integer
4448 4456 or a flag name for stream flags or frame flags, respectively. Values are
4449 4457 resolved to integers and then bitwise OR'd together.
4450 4458
4451 4459 ``payload`` represents the raw frame payload. If it begins with
4452 4460 ``cbor:``, the following string is evaluated as Python code and the
4453 4461 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4454 4462 as a Python byte string literal.
4455 4463 """
4456 4464 opts = pycompat.byteskwargs(opts)
4457 4465
4458 4466 if opts[b'localssh'] and not repo:
4459 4467 raise error.Abort(_(b'--localssh requires a repository'))
4460 4468
4461 4469 if opts[b'peer'] and opts[b'peer'] not in (
4462 4470 b'raw',
4463 4471 b'ssh1',
4464 4472 ):
4465 4473 raise error.Abort(
4466 4474 _(b'invalid value for --peer'),
4467 4475 hint=_(b'valid values are "raw" and "ssh1"'),
4468 4476 )
4469 4477
4470 4478 if path and opts[b'localssh']:
4471 4479 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4472 4480
4473 4481 if ui.interactive():
4474 4482 ui.write(_(b'(waiting for commands on stdin)\n'))
4475 4483
4476 4484 blocks = list(_parsewirelangblocks(ui.fin))
4477 4485
4478 4486 proc = None
4479 4487 stdin = None
4480 4488 stdout = None
4481 4489 stderr = None
4482 4490 opener = None
4483 4491
4484 4492 if opts[b'localssh']:
4485 4493 # We start the SSH server in its own process so there is process
4486 4494 # separation. This prevents a whole class of potential bugs around
4487 4495 # shared state from interfering with server operation.
4488 4496 args = procutil.hgcmd() + [
4489 4497 b'-R',
4490 4498 repo.root,
4491 4499 b'debugserve',
4492 4500 b'--sshstdio',
4493 4501 ]
4494 4502 proc = subprocess.Popen(
4495 4503 pycompat.rapply(procutil.tonativestr, args),
4496 4504 stdin=subprocess.PIPE,
4497 4505 stdout=subprocess.PIPE,
4498 4506 stderr=subprocess.PIPE,
4499 4507 bufsize=0,
4500 4508 )
4501 4509
4502 4510 stdin = proc.stdin
4503 4511 stdout = proc.stdout
4504 4512 stderr = proc.stderr
4505 4513
4506 4514 # We turn the pipes into observers so we can log I/O.
4507 4515 if ui.verbose or opts[b'peer'] == b'raw':
4508 4516 stdin = util.makeloggingfileobject(
4509 4517 ui, proc.stdin, b'i', logdata=True
4510 4518 )
4511 4519 stdout = util.makeloggingfileobject(
4512 4520 ui, proc.stdout, b'o', logdata=True
4513 4521 )
4514 4522 stderr = util.makeloggingfileobject(
4515 4523 ui, proc.stderr, b'e', logdata=True
4516 4524 )
4517 4525
4518 4526 # --localssh also implies the peer connection settings.
4519 4527
4520 4528 url = b'ssh://localserver'
4521 4529 autoreadstderr = not opts[b'noreadstderr']
4522 4530
4523 4531 if opts[b'peer'] == b'ssh1':
4524 4532 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4525 4533 peer = sshpeer.sshv1peer(
4526 4534 ui,
4527 4535 url,
4528 4536 proc,
4529 4537 stdin,
4530 4538 stdout,
4531 4539 stderr,
4532 4540 None,
4533 4541 autoreadstderr=autoreadstderr,
4534 4542 )
4535 4543 elif opts[b'peer'] == b'raw':
4536 4544 ui.write(_(b'using raw connection to peer\n'))
4537 4545 peer = None
4538 4546 else:
4539 4547 ui.write(_(b'creating ssh peer from handshake results\n'))
4540 4548 peer = sshpeer._make_peer(
4541 4549 ui,
4542 4550 url,
4543 4551 proc,
4544 4552 stdin,
4545 4553 stdout,
4546 4554 stderr,
4547 4555 autoreadstderr=autoreadstderr,
4548 4556 )
4549 4557
4550 4558 elif path:
4551 4559 # We bypass hg.peer() so we can proxy the sockets.
4552 4560 # TODO consider not doing this because we skip
4553 4561 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4554 4562 u = urlutil.url(path)
4555 4563 if u.scheme != b'http':
4556 4564 raise error.Abort(_(b'only http:// paths are currently supported'))
4557 4565
4558 4566 url, authinfo = u.authinfo()
4559 4567 openerargs = {
4560 4568 'useragent': b'Mercurial debugwireproto',
4561 4569 }
4562 4570
4563 4571 # Turn pipes/sockets into observers so we can log I/O.
4564 4572 if ui.verbose:
4565 4573 openerargs.update(
4566 4574 {
4567 4575 'loggingfh': ui,
4568 4576 'loggingname': b's',
4569 4577 'loggingopts': {
4570 4578 'logdata': True,
4571 4579 'logdataapis': False,
4572 4580 },
4573 4581 }
4574 4582 )
4575 4583
4576 4584 if ui.debugflag:
4577 4585 openerargs['loggingopts']['logdataapis'] = True
4578 4586
4579 4587 # Don't send default headers when in raw mode. This allows us to
4580 4588 # bypass most of the behavior of our URL handling code so we can
4581 4589 # have near complete control over what's sent on the wire.
4582 4590 if opts[b'peer'] == b'raw':
4583 4591 openerargs['sendaccept'] = False
4584 4592
4585 4593 opener = urlmod.opener(ui, authinfo, **openerargs)
4586 4594
4587 4595 if opts[b'peer'] == b'raw':
4588 4596 ui.write(_(b'using raw connection to peer\n'))
4589 4597 peer = None
4590 4598 elif opts[b'peer']:
4591 4599 raise error.Abort(
4592 4600 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4593 4601 )
4594 4602 else:
4595 4603 peer_path = urlutil.try_path(ui, path)
4596 4604 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4597 4605
4598 4606 # We /could/ populate stdin/stdout with sock.makefile()...
4599 4607 else:
4600 4608 raise error.Abort(_(b'unsupported connection configuration'))
4601 4609
4602 4610 batchedcommands = None
4603 4611
4604 4612 # Now perform actions based on the parsed wire language instructions.
4605 4613 for action, lines in blocks:
4606 4614 if action in (b'raw', b'raw+'):
4607 4615 if not stdin:
4608 4616 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4609 4617
4610 4618 # Concatenate the data together.
4611 4619 data = b''.join(l.lstrip() for l in lines)
4612 4620 data = stringutil.unescapestr(data)
4613 4621 stdin.write(data)
4614 4622
4615 4623 if action == b'raw+':
4616 4624 stdin.flush()
4617 4625 elif action == b'flush':
4618 4626 if not stdin:
4619 4627 raise error.Abort(_(b'cannot call flush on this peer'))
4620 4628 stdin.flush()
4621 4629 elif action.startswith(b'command'):
4622 4630 if not peer:
4623 4631 raise error.Abort(
4624 4632 _(
4625 4633 b'cannot send commands unless peer instance '
4626 4634 b'is available'
4627 4635 )
4628 4636 )
4629 4637
4630 4638 command = action.split(b' ', 1)[1]
4631 4639
4632 4640 args = {}
4633 4641 for line in lines:
4634 4642 # We need to allow empty values.
4635 4643 fields = line.lstrip().split(b' ', 1)
4636 4644 if len(fields) == 1:
4637 4645 key = fields[0]
4638 4646 value = b''
4639 4647 else:
4640 4648 key, value = fields
4641 4649
4642 4650 if value.startswith(b'eval:'):
4643 4651 value = stringutil.evalpythonliteral(value[5:])
4644 4652 else:
4645 4653 value = stringutil.unescapestr(value)
4646 4654
4647 4655 args[key] = value
4648 4656
4649 4657 if batchedcommands is not None:
4650 4658 batchedcommands.append((command, args))
4651 4659 continue
4652 4660
4653 4661 ui.status(_(b'sending %s command\n') % command)
4654 4662
4655 4663 if b'PUSHFILE' in args:
4656 4664 with open(args[b'PUSHFILE'], 'rb') as fh:
4657 4665 del args[b'PUSHFILE']
4658 4666 res, output = peer._callpush(
4659 4667 command, fh, **pycompat.strkwargs(args)
4660 4668 )
4661 4669 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4662 4670 ui.status(
4663 4671 _(b'remote output: %s\n') % stringutil.escapestr(output)
4664 4672 )
4665 4673 else:
4666 4674 with peer.commandexecutor() as e:
4667 4675 res = e.callcommand(command, args).result()
4668 4676
4669 4677 ui.status(
4670 4678 _(b'response: %s\n')
4671 4679 % stringutil.pprint(res, bprefix=True, indent=2)
4672 4680 )
4673 4681
4674 4682 elif action == b'batchbegin':
4675 4683 if batchedcommands is not None:
4676 4684 raise error.Abort(_(b'nested batchbegin not allowed'))
4677 4685
4678 4686 batchedcommands = []
4679 4687 elif action == b'batchsubmit':
4680 4688 # There is a batching API we could go through. But it would be
4681 4689 # difficult to normalize requests into function calls. It is easier
4682 4690 # to bypass this layer and normalize to commands + args.
4683 4691 ui.status(
4684 4692 _(b'sending batch with %d sub-commands\n')
4685 4693 % len(batchedcommands)
4686 4694 )
4687 4695 assert peer is not None
4688 4696 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4689 4697 ui.status(
4690 4698 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4691 4699 )
4692 4700
4693 4701 batchedcommands = None
4694 4702
4695 4703 elif action.startswith(b'httprequest '):
4696 4704 if not opener:
4697 4705 raise error.Abort(
4698 4706 _(b'cannot use httprequest without an HTTP peer')
4699 4707 )
4700 4708
4701 4709 request = action.split(b' ', 2)
4702 4710 if len(request) != 3:
4703 4711 raise error.Abort(
4704 4712 _(
4705 4713 b'invalid httprequest: expected format is '
4706 4714 b'"httprequest <method> <path>'
4707 4715 )
4708 4716 )
4709 4717
4710 4718 method, httppath = request[1:]
4711 4719 headers = {}
4712 4720 body = None
4713 4721 frames = []
4714 4722 for line in lines:
4715 4723 line = line.lstrip()
4716 4724 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4717 4725 if m:
4718 4726 # Headers need to use native strings.
4719 4727 key = pycompat.strurl(m.group(1))
4720 4728 value = pycompat.strurl(m.group(2))
4721 4729 headers[key] = value
4722 4730 continue
4723 4731
4724 4732 if line.startswith(b'BODYFILE '):
4725 4733 with open(line.split(b' ', 1)[1], b'rb') as fh:
4726 4734 body = fh.read()
4727 4735 elif line.startswith(b'frame '):
4728 4736 frame = wireprotoframing.makeframefromhumanstring(
4729 4737 line[len(b'frame ') :]
4730 4738 )
4731 4739
4732 4740 frames.append(frame)
4733 4741 else:
4734 4742 raise error.Abort(
4735 4743 _(b'unknown argument to httprequest: %s') % line
4736 4744 )
4737 4745
4738 4746 url = path + httppath
4739 4747
4740 4748 if frames:
4741 4749 body = b''.join(bytes(f) for f in frames)
4742 4750
4743 4751 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4744 4752
4745 4753 # urllib.Request insists on using has_data() as a proxy for
4746 4754 # determining the request method. Override that to use our
4747 4755 # explicitly requested method.
4748 4756 req.get_method = lambda: pycompat.sysstr(method)
4749 4757
4750 4758 try:
4751 4759 res = opener.open(req)
4752 4760 body = res.read()
4753 4761 except util.urlerr.urlerror as e:
4754 4762 # read() method must be called, but only exists in Python 2
4755 4763 getattr(e, 'read', lambda: None)()
4756 4764 continue
4757 4765
4758 4766 ct = res.headers.get('Content-Type')
4759 4767 if ct == 'application/mercurial-cbor':
4760 4768 ui.write(
4761 4769 _(b'cbor> %s\n')
4762 4770 % stringutil.pprint(
4763 4771 cborutil.decodeall(body), bprefix=True, indent=2
4764 4772 )
4765 4773 )
4766 4774
4767 4775 elif action == b'close':
4768 4776 assert peer is not None
4769 4777 peer.close()
4770 4778 elif action == b'readavailable':
4771 4779 if not stdout or not stderr:
4772 4780 raise error.Abort(
4773 4781 _(b'readavailable not available on this peer')
4774 4782 )
4775 4783
4776 4784 stdin.close()
4777 4785 stdout.read()
4778 4786 stderr.read()
4779 4787
4780 4788 elif action == b'readline':
4781 4789 if not stdout:
4782 4790 raise error.Abort(_(b'readline not available on this peer'))
4783 4791 stdout.readline()
4784 4792 elif action == b'ereadline':
4785 4793 if not stderr:
4786 4794 raise error.Abort(_(b'ereadline not available on this peer'))
4787 4795 stderr.readline()
4788 4796 elif action.startswith(b'read '):
4789 4797 count = int(action.split(b' ', 1)[1])
4790 4798 if not stdout:
4791 4799 raise error.Abort(_(b'read not available on this peer'))
4792 4800 stdout.read(count)
4793 4801 elif action.startswith(b'eread '):
4794 4802 count = int(action.split(b' ', 1)[1])
4795 4803 if not stderr:
4796 4804 raise error.Abort(_(b'eread not available on this peer'))
4797 4805 stderr.read(count)
4798 4806 else:
4799 4807 raise error.Abort(_(b'unknown action: %s') % action)
4800 4808
4801 4809 if batchedcommands is not None:
4802 4810 raise error.Abort(_(b'unclosed "batchbegin" request'))
4803 4811
4804 4812 if peer:
4805 4813 peer.close()
4806 4814
4807 4815 if proc:
4808 4816 proc.kill()
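# Illustrative sketch (command-line level): a script in the mini language
# documented above can be piped to `hg debugwireproto --localssh` over
# stdin. The repository path, helper name, and chosen wire commands are
# assumptions.
def _example_drive_wireproto(repo_root):
    import subprocess

    script = (
        b'command heads\n'
        b'command listkeys\n'
        b'    namespace bookmarks\n'
    )
    result = subprocess.run(
        ['hg', '-R', repo_root, 'debugwireproto', '--localssh'],
        input=script,
        capture_output=True,
    )
    return result.stdout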
@@ -1,453 +1,453 b''
1 1 Show all commands except debug commands
2 2 $ hg debugcomplete
3 3 abort
4 4 add
5 5 addremove
6 6 annotate
7 7 archive
8 8 backout
9 9 bisect
10 10 bookmarks
11 11 branch
12 12 branches
13 13 bundle
14 14 cat
15 15 clone
16 16 commit
17 17 config
18 18 continue
19 19 copy
20 20 diff
21 21 export
22 22 files
23 23 forget
24 24 graft
25 25 grep
26 26 heads
27 27 help
28 28 identify
29 29 import
30 30 incoming
31 31 init
32 32 locate
33 33 log
34 34 manifest
35 35 merge
36 36 outgoing
37 37 parents
38 38 paths
39 39 phase
40 40 pull
41 41 purge
42 42 push
43 43 recover
44 44 remove
45 45 rename
46 46 resolve
47 47 revert
48 48 rollback
49 49 root
50 50 serve
51 51 shelve
52 52 status
53 53 summary
54 54 tag
55 55 tags
56 56 tip
57 57 unbundle
58 58 unshelve
59 59 update
60 60 verify
61 61 version
62 62
63 63 Show all commands that start with "a"
64 64 $ hg debugcomplete a
65 65 abort
66 66 add
67 67 addremove
68 68 annotate
69 69 archive
70 70
71 71 Do not show debug commands if there are other candidates
72 72 $ hg debugcomplete d
73 73 diff
74 74
75 75 Show debug commands if there are no other candidates
76 76 $ hg debugcomplete debug
77 77 debug-delta-find
78 78 debug-repair-issue6528
79 79 debug-revlog-index
80 80 debug-revlog-stats
81 81 debug::stable-tail-sort
82 82 debugancestor
83 83 debugantivirusrunning
84 84 debugapplystreamclonebundle
85 85 debugbackupbundle
86 86 debugbuilddag
87 87 debugbundle
88 88 debugcapabilities
89 89 debugchangedfiles
90 90 debugcheckstate
91 91 debugcolor
92 92 debugcommands
93 93 debugcomplete
94 94 debugconfig
95 95 debugcreatestreamclonebundle
96 96 debugdag
97 97 debugdata
98 98 debugdate
99 99 debugdeltachain
100 100 debugdirstate
101 101 debugdirstateignorepatternshash
102 102 debugdiscovery
103 103 debugdownload
104 104 debugextensions
105 105 debugfileset
106 106 debugformat
107 107 debugfsinfo
108 108 debuggetbundle
109 109 debugignore
110 110 debugindexdot
111 111 debugindexstats
112 112 debuginstall
113 113 debugknown
114 114 debuglabelcomplete
115 115 debuglocks
116 116 debugmanifestfulltextcache
117 117 debugmergestate
118 118 debugnamecomplete
119 119 debugnodemap
120 120 debugobsolete
121 121 debugp1copies
122 122 debugp2copies
123 123 debugpathcomplete
124 124 debugpathcopies
125 125 debugpeer
126 126 debugpickmergetool
127 127 debugpushkey
128 128 debugpvec
129 129 debugrebuilddirstate
130 130 debugrebuildfncache
131 131 debugrename
132 132 debugrequires
133 133 debugrevlog
134 134 debugrevlogindex
135 135 debugrevspec
136 136 debugserve
137 137 debugsetparents
138 138 debugshell
139 139 debugsidedata
140 140 debugssl
141 141 debugstrip
142 142 debugsub
143 143 debugsuccessorssets
144 144 debugtagscache
145 145 debugtemplate
146 146 debuguigetpass
147 147 debuguiprompt
148 148 debugupdatecaches
149 149 debugupgraderepo
150 150 debugwalk
151 151 debugwhyunstable
152 152 debugwireargs
153 153 debugwireproto
154 154
155 155 Do not show the alias of a debug command if there are other candidates
156 156 (this should hide rawcommit)
157 157 $ hg debugcomplete r
158 158 recover
159 159 remove
160 160 rename
161 161 resolve
162 162 revert
163 163 rollback
164 164 root
165 165 Show the alias of a debug command if there are no other candidates
166 166 $ hg debugcomplete rawc
167 167
168 168
169 169 Show the global options
170 170 $ hg debugcomplete --options | sort
171 171 --color
172 172 --config
173 173 --cwd
174 174 --debug
175 175 --debugger
176 176 --encoding
177 177 --encodingmode
178 178 --help
179 179 --hidden
180 180 --noninteractive
181 181 --pager
182 182 --profile
183 183 --quiet
184 184 --repository
185 185 --time
186 186 --traceback
187 187 --verbose
188 188 --version
189 189 -R
190 190 -h
191 191 -q
192 192 -v
193 193 -y
194 194
195 195 Show the options for the "serve" command
196 196 $ hg debugcomplete --options serve | sort
197 197 --accesslog
198 198 --address
199 199 --certificate
200 200 --cmdserver
201 201 --color
202 202 --config
203 203 --cwd
204 204 --daemon
205 205 --daemon-postexec
206 206 --debug
207 207 --debugger
208 208 --encoding
209 209 --encodingmode
210 210 --errorlog
211 211 --help
212 212 --hidden
213 213 --ipv6
214 214 --name
215 215 --noninteractive
216 216 --pager
217 217 --pid-file
218 218 --port
219 219 --prefix
220 220 --print-url
221 221 --profile
222 222 --quiet
223 223 --repository
224 224 --stdio
225 225 --style
226 226 --subrepos
227 227 --templates
228 228 --time
229 229 --traceback
230 230 --verbose
231 231 --version
232 232 --web-conf
233 233 -6
234 234 -A
235 235 -E
236 236 -R
237 237 -S
238 238 -a
239 239 -d
240 240 -h
241 241 -n
242 242 -p
243 243 -q
244 244 -t
245 245 -v
246 246 -y
247 247
248 248 Show an error if we use --options with an ambiguous abbreviation
249 249 $ hg debugcomplete --options s
250 250 hg: command 's' is ambiguous:
251 251 serve shelve showconfig status summary
252 252 [10]
253 253
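The command and option listings above are exactly what shell completion scripts consume. As an illustration only, a stripped-down bash hook could be wired up roughly as follows; the function name _hg_complete_sketch and its structure are hypothetical and not part of Mercurial, whose shipped completion scripts are considerably more elaborate.

  # Hypothetical minimal bash completion using the two debugcomplete modes
  # demonstrated above: a bare prefix completes command names, while
  # "debugcomplete --options CMD" lists the flags of a command.
  _hg_complete_sketch() {
      local cur=${COMP_WORDS[COMP_CWORD]}
      if [[ $cur == -* && ${#COMP_WORDS[@]} -gt 2 ]]; then
          # completing a flag: ask hg for the options of the current command
          COMPREPLY=( $(compgen -W "$(hg debugcomplete --options "${COMP_WORDS[1]}")" -- "$cur") )
      else
          # completing a command name (or an abbreviation of one)
          COMPREPLY=( $(compgen -W "$(hg debugcomplete "$cur")" -- "$cur") )
      fi
  }
  complete -F _hg_complete_sketch hg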
254 254 Show all commands + options
255 255 $ hg debugcommands
256 256 abort: dry-run
257 257 add: include, exclude, subrepos, dry-run
258 258 addremove: similarity, subrepos, include, exclude, dry-run
259 259 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
260 260 archive: no-decode, prefix, rev, type, subrepos, include, exclude
261 261 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
262 262 bisect: reset, good, bad, skip, extend, command, noupdate
263 263 bookmarks: force, rev, delete, rename, inactive, list, template
264 264 branch: force, clean, rev
265 265 branches: active, closed, rev, template
266 266 bundle: exact, force, rev, branch, base, all, type, ssh, remotecmd, insecure
267 267 cat: output, rev, decode, include, exclude, template
268 268 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
269 269 commit: addremove, close-branch, amend, secret, draft, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
270 270 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
271 271 continue: dry-run
272 272 copy: forget, after, at-rev, force, include, exclude, dry-run
273 273 debug-delta-find: changelog, manifest, dir, template, source
274 274 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
275 275 debug-revlog-index: changelog, manifest, dir, template
276 276 debug-revlog-stats: changelog, manifest, filelogs, template
277 277 debug::stable-tail-sort: template
278 278 debugancestor:
279 279 debugantivirusrunning:
280 280 debugapplystreamclonebundle:
281 281 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
282 282 debugbuilddag: mergeable-file, overwritten-file, new-file, from-existing
283 283 debugbundle: all, part-type, spec
284 284 debugcapabilities:
285 285 debugchangedfiles: compute
286 286 debugcheckstate:
287 287 debugcolor: style
288 288 debugcommands:
289 289 debugcomplete: options
290 290 debugcreatestreamclonebundle:
291 291 debugdag: tags, branches, dots, spaces
292 292 debugdata: changelog, manifest, dir
293 293 debugdate: extended
294 294 debugdeltachain: changelog, manifest, dir, template
295 295 debugdirstateignorepatternshash:
296 296 debugdirstate: nodates, dates, datesort, docket, all
297 297 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
298 298 debugdownload: output
299 299 debugextensions: template
300 300 debugfileset: rev, all-files, show-matcher, show-stage
301 301 debugformat: template
302 302 debugfsinfo:
303 303 debuggetbundle: head, common, type
304 304 debugignore:
305 305 debugindexdot: changelog, manifest, dir
306 306 debugindexstats:
307 307 debuginstall: template
308 308 debugknown:
309 309 debuglabelcomplete:
310 310 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
311 311 debugmanifestfulltextcache: clear, add
312 312 debugmergestate: style, template
313 313 debugnamecomplete:
314 debugnodemap: dump-new, dump-disk, check, metadata
314 debugnodemap: changelog, manifest, dir, dump-new, dump-disk, check, metadata
315 315 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
316 316 debugp1copies: rev
317 317 debugp2copies: rev
318 318 debugpathcomplete: full, normal, added, removed
319 319 debugpathcopies: include, exclude
320 320 debugpeer:
321 321 debugpickmergetool: rev, changedelete, include, exclude, tool
322 322 debugpushkey:
323 323 debugpvec:
324 324 debugrebuilddirstate: rev, minimal
325 325 debugrebuildfncache: only-data
326 326 debugrename: rev
327 327 debugrequires:
328 328 debugrevlog: changelog, manifest, dir, dump
329 329 debugrevlogindex: changelog, manifest, dir, format
330 330 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
331 331 debugserve: sshstdio, logiofd, logiofile
332 332 debugsetparents:
333 333 debugshell: command
334 334 debugsidedata: changelog, manifest, dir
335 335 debugssl:
336 336 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
337 337 debugsub: rev
338 338 debugsuccessorssets: closest
339 339 debugtagscache:
340 340 debugtemplate: rev, define
341 341 debuguigetpass: prompt
342 342 debuguiprompt: prompt
343 343 debugupdatecaches:
344 344 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
345 345 debugwalk: include, exclude
346 346 debugwhyunstable:
347 347 debugwireargs: three, four, five, ssh, remotecmd, insecure
348 348 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
349 349 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
350 350 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
351 351 files: rev, print0, include, exclude, template, subrepos
352 352 forget: interactive, include, exclude, dry-run
353 353 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
354 354 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
355 355 heads: rev, topo, active, closed, style, template
356 356 help: extension, command, keyword, system
357 357 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
358 358 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
359 359 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
360 360 init: ssh, remotecmd, insecure
361 361 locate: rev, print0, fullpath, include, exclude
362 362 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
363 363 manifest: rev, all, template
364 364 merge: force, rev, preview, abort, tool
365 365 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
366 366 parents: rev, style, template
367 367 paths: template
368 368 phase: public, draft, secret, force, rev
369 369 pull: update, force, confirm, rev, bookmark, branch, remote-hidden, ssh, remotecmd, insecure
370 370 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
371 371 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
372 372 recover: verify
373 373 remove: after, force, subrepos, include, exclude, dry-run
374 374 rename: forget, after, at-rev, force, include, exclude, dry-run
375 375 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
376 376 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
377 377 rollback: dry-run, force
378 378 root: template
379 379 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
380 380 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
381 381 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
382 382 summary: remote
383 383 tag: force, local, rev, remove, edit, message, date, user
384 384 tags: template
385 385 tip: patch, git, style, template
386 386 unbundle: update
387 387 unshelve: abort, continue, interactive, keep, name, tool, date
388 388 update: clean, check, merge, date, rev, tool
389 389 verify: full
390 390 version: template
391 391
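The only line that changes in the listing above is the debugnodemap entry: it now accepts the same changelog, manifest and dir selectors that debugdata, debugdeltachain and debugrevlog already take, in addition to its existing dump-new, dump-disk, check and metadata flags. Assuming those selector names surface as the usual long options, pointing the command at the manifest's nodemap instead of the changelog's would look something like:

  $ hg debugnodemap --manifest --metadata

(Shown for illustration only; the exact flag spellings follow from the option names listed above.)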
392 392 $ hg init a
393 393 $ cd a
394 394 $ echo fee > fee
395 395 $ hg ci -q -Amfee
396 396 $ hg tag fee
397 397 $ mkdir fie
398 398 $ echo dead > fie/dead
399 399 $ echo live > fie/live
400 400 $ hg bookmark fo
401 401 $ hg branch -q fie
402 402 $ hg ci -q -Amfie
403 403 $ echo fo > fo
404 404 $ hg branch -qf default
405 405 $ hg ci -q -Amfo
406 406 $ echo Fum > Fum
407 407 $ hg ci -q -AmFum
408 408 $ hg bookmark Fum
409 409
410 410 Test debugpathcomplete
411 411
412 412 $ hg debugpathcomplete f
413 413 fee
414 414 fie
415 415 fo
416 416 $ hg debugpathcomplete -f f
417 417 fee
418 418 fie/dead
419 419 fie/live
420 420 fo
421 421
422 422 $ hg rm Fum
423 423 $ hg debugpathcomplete -r F
424 424 Fum
425 425
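Per the option list further up (full, normal, added, removed), debugpathcomplete can also restrict matches by dirstate status: -f expands directories into their contents as shown above, while -r here presumably maps to --removed and completes only removed files, which is why only Fum is offered after "hg rm Fum". Assuming the remaining names follow the same long-option pattern, completing just added files would be along the lines of:

  $ hg debugpathcomplete --added f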
426 426 Test debugnamecomplete
427 427
428 428 $ hg debugnamecomplete
429 429 Fum
430 430 default
431 431 fee
432 432 fie
433 433 fo
434 434 tip
435 435 $ hg debugnamecomplete f
436 436 fee
437 437 fie
438 438 fo
439 439
440 440 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
441 441 used for completions in some shells.
442 442
443 443 $ hg debuglabelcomplete
444 444 Fum
445 445 default
446 446 fee
447 447 fie
448 448 fo
449 449 tip
450 450 $ hg debuglabelcomplete f
451 451 fee
452 452 fie
453 453 fo
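Because debuglabelcomplete produces the same output as debugnamecomplete, completion scripts written against the old name keep working unchanged; a new script would simply call the newer command. For example, inside a completion function along the lines of the earlier sketch, completing a revision argument could run:

  $ hg debugnamecomplete "$cur"

which, in the repository built above, offers the bookmarks (Fum, fo), the branches (default, fie), the fee tag and tip.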