##// END OF EJS Templates
debug-delta-find: add a --source option...
marmoute -
r50484:266bb5c8 default
parent child Browse files
Show More
@@ -1,5058 +1,5091 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 mdiff,
62 63 mergestate as mergestatemod,
63 64 metadata,
64 65 obsolete,
65 66 obsutil,
66 67 pathutil,
67 68 phases,
68 69 policy,
69 70 pvec,
70 71 pycompat,
71 72 registrar,
72 73 repair,
73 74 repoview,
74 75 requirements,
75 76 revlog,
76 77 revlogutils,
77 78 revset,
78 79 revsetlang,
79 80 scmutil,
80 81 setdiscovery,
81 82 simplemerge,
82 83 sshpeer,
83 84 sslutil,
84 85 streamclone,
85 86 strip,
86 87 tags as tagsmod,
87 88 templater,
88 89 treediscovery,
89 90 upgrade,
90 91 url as urlmod,
91 92 util,
92 93 vfs as vfsmod,
93 94 wireprotoframing,
94 95 wireprotoserver,
95 96 )
96 97 from .interfaces import repository
97 98 from .utils import (
98 99 cborutil,
99 100 compression,
100 101 dateutil,
101 102 procutil,
102 103 stringutil,
103 104 urlutil,
104 105 )
105 106
106 107 from .revlogutils import (
107 108 constants as revlog_constants,
108 109 debug as revlog_debug,
109 110 deltas as deltautil,
110 111 nodemap,
111 112 rewrite,
112 113 sidedata,
113 114 )
114 115
115 116 release = lockmod.release
116 117
117 118 table = {}
118 119 table.update(strip.command._table)
119 120 command = registrar.command(table)
120 121
121 122
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it directly, without a repo.
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif nargs == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor_node = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(ancestor_node), hex(ancestor_node)))
141 142
142 143
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Use a bytes path: every other vfs call in this module passes bytes, and
    # the str literal here was mixed with the bytes b'wb' mode (a str path can
    # raise TypeError when joined with the vfs's bytes base directory).
    eicar_path = b'eicar-test-file.com'
    with repo.cachevfs.open(eicar_path, b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(eicar_path))
158 159
159 160
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    try:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
    finally:
        # the original leaked the handle; close it even if apply() raises
        f.close()
166 167
167 168
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # first pass over the DAG text: count node events so the progress bar
    # below can report a total
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # hold both locks and a single transaction for the whole build
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        # second pass: actually create one commit per 'n' event
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # maintain a single file "mf" whose lines merge cleanly
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this revision's dedicated line with its rev number
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every revision
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # one brand-new file per revision, plus carry over the
                    # second parent's "nf*" files on merges
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # translate backref parent ids into real node ids
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag event: remember it, written out after the loop
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # branch annotation: applies to subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
351 352
352 353
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup ``gen`` to the ui

    With ``all`` set, one line per delta (node, parents, cset, delta base,
    delta length) is printed for the changelog, the manifest and every
    filelog; otherwise only the changelog node ids are listed.  ``indent``
    shifts every output line right (used when nested in bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one line per delta of the current sub-group
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # the changegroup stream must be consumed in order:
        # changelog, manifest, then one section per file
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelogheader() returns {} as the end-of-stream sentinel
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
392 393
393 394
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, m)
        fm.end()
416 417
417 418
def _debugphaseheads(ui, data, indent=0):
    """display the heads of each phase encoded in binary 'data'"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426 427
427 428
def _quasirepr(thing):
    """repr()-like rendering, but with deterministic dict key ordering"""
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    entries = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return b'{%s}' % b', '.join(entries)
434 435
435 436
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # with --part-type, restrict output to the requested part types
        if wanted and part.type not in wanted:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # the unbundler must be built even in quiet mode so the part
            # payload is consumed consistently
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458 459
459 460
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec, without unpacking the contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482 483
483 484
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # query before printing, so a failing peer produces no output
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        peer.close()
503 504
504 505
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    if opts['compute']:
        # recompute from the revision contents instead of trusting storage
        files = metadata.compute_all_files_changes(ctx)
    else:
        files = None
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        # annotate copies with the parent they were copied from
        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
554 555
555 556
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    nerrors = 0
    for err in repo.dirstate.verify(manifest1, manifest2):
        # each err is (format-string, *args)
        ui.warn(err[0] % err[1:])
        nerrors += 1
    if nerrors:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
569 570
570 571
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
583 584
584 585
def _debugdisplaycolor(ui):
    """print every known color/effect name, each rendered in its own style"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    # sort labels containing '_' after the others so '_background' entries
    # group together behind their base colors
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601 602
602 603
def _debugdisplaystyle(ui):
    """print every configured style and the effects it expands to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # align the effect lists on the longest style name
    column = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, column - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616 617
617 618
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        warning = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(warning)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
639 640
640 641
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit index file: read its DAG directly, outside any repo
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield 'n' (node) events with parent lists, and 'l' (label)
            # events for any rev explicitly listed on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged rev to its tag names for label events below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event whenever the named
                    # branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
710 711
711 712
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # the revlog was selected by flag; the sole positional argument is
        # actually the revision, so shift it over
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # rawdata: stored bytes, without flag processors applied
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 728
728 729
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # parsed is an (unixtime, tzoffset) pair
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747 748
748 749
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base:  a full snapshot
                    - snap:  an intermediate snapshot
                    - p1:    a delta against the first parent
                    - p2:    a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev:  a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # gather per-revision delta statistics from the revlog index entry
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # classify how this revision's delta was computed (see docstring)
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, the base is either the revision itself
            # (full text) or the immediately preceding revision
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # number chains sequentially by their (unique) base revision
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: how many hunks, how many bytes
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
989 990
990 991
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).
    """
    opts = pycompat.byteskwargs(opts)
    # one argument: REV on the repository's manifest/changelog
    # two arguments: FILE REV
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # use this command's own name so errors raised while opening the revlog
    # mention the right command (was a copy-paste of b'debugdeltachain')
    revlog = cmdutil.openrevlog(repo, b'debug-delta-find', file_, opts)

    deltacomputer = deltautil.deltacomputer(
        revlog,
        write_debug=ui.write,
        debug_search=not ui.quiet,
    )

    node = revlog.node(rev)
    p1r, p2r = revlog.parentrevs(rev)
    p1 = revlog.node(p1r)
    p2 = revlog.node(p2r)
    full_text = revlog.revision(rev)
    btext = [full_text]
    textlen = len(btext[0])
    cachedelta = None
    flags = revlog.flags(rev)

    if source != b'full':
        # simulate an incoming delta against the selected base instead of a
        # full text, the way exchange or storage would feed it
        if source == b'storage':
            base_rev = revlog.deltaparent(rev)
        elif source == b'p1':
            base_rev = p1r
        elif source == b'p2':
            base_rev = p2r
        elif source == b'prev':
            base_rev = rev - 1
        else:
            raise error.InputError(b"invalid --source value: %s" % source)

        if base_rev != nullrev:
            base_text = revlog.revision(base_rev)
            delta = mdiff.textdiff(base_text, full_text)

            cachedelta = (base_rev, delta)
            # the full text is recomputed from the delta when needed
            btext = [None]

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    fh = revlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
1049 1082
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 docket metadata instead of the entries
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        # field order must match dirstateutils.v2.TREE_METADATA's struct layout
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --dates defaults to True; the deprecated --nodates overrides it when set
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) when --datesort is requested
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # padded to the width of the strftime output below for alignment
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set: show 'lnk' instead of an octal mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1137 1170
1138 1171
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v2
    """
    if not repo.dirstate._use_dirstate_v2:
        return
    # the ignore-pattern hash is the trailing SHA-1 (160 bits = 20 bytes)
    # of the dirstate-v2 tree metadata stored in the docket
    tree_metadata = repo.dirstate._map.docket.tree_metadata
    ui.write(binascii.hexlify(tree_metadata[-20:]) + b'\n')
1153 1186
1154 1187
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If random sampling during discovery are deterministic. It is meant for
      integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # real remote: resolve the pull path and open a peer

        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: impersonate the remote with a filtered view of
        # the local repo that only contains the ancestors of those revs
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        # register the ad-hoc filter so repo.filtered() below can find it
        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same trick for the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` collects audit information filled in by the discovery code
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable formatters (e.g. json) capture the discovery
        # chatter into the 'output' field instead of interleaving it

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    # the per-step counters below are only present for some discovery modes
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1436 1469
1437 1470
_chunksize = 4 << 10


@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is fetched with the same configuration (proxy,
    authentication, ...) Mercurial itself would use. With --output the
    payload is written to that file, otherwise it is echoed on the ui.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # stream in fixed-size chunks so arbitrarily large resources never
        # need to fit in memory at once
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # always release the network handle (it was previously leaked), and
        # close the output file when we opened one (never close the ui)
        fh.close()
        if output:
            dest.close()
1463 1496
1464 1497
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions sorted by name for stable output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) build: modules have no __file__, point at
            # the executable instead
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
        # annotate the name with compatibility info for the default verbosity
        if isinternal or hgver in exttestedwith:
            fm.plain(b'\n')
        elif not exttestedwith:
            fm.plain(_(b' (untested!)\n'))
        else:
            lasttestedversion = exttestedwith[-1]
            fm.plain(b' (%s!)\n' % lasttestedversion)

        # the remaining fields only show up with --verbose
        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1526 1559
1527 1560
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # parsing pipeline; each stage transforms the tree produced by the
    # previous one
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # which stage trees to print (--show-stage, or implicitly via --verbose)
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # header is omitted for the legacy --verbose 'parsed' display
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1623 1656
1624 1657
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # building a report and applying one (or a dry run) are exclusive
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # all the real work happens in the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1697 1730
1698 1731
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: the longest variant name, but at least as
    # wide as the 'format-variant' header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded with spaces so all value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output renders booleans as yes/no; strings pass through
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo/config/default can be
        # color-highlighted
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns only appear with --verbose
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1769 1802
1770 1803
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(value):
        # render a boolean capability as the historical b'yes'/b'no' text
        return b'yes' if value else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throw-away temp file; some filesystems
    # (or permission setups) refuse the probe, hence the OSError guard
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1793 1826
1794 1827
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # map the user-facing compression name onto the on-disk bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1841 1874
1842 1875
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            # `ignored` is the path that actually matched an ignore rule:
            # either the file itself or one of its parent directories
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # the file itself is not ignored; check its parents
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report the exact rule (file, line number, pattern text)
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1891 1924
1892 1925
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # the storage object may wrap the actual revlog; unwrap it when it does.
    # NOTE: `getattr` here is the pycompat wrapper imported at the top of
    # this module, which accepts a bytes attribute name.
    revlog = getattr(store, b'_revlog', store)

    # revlog_debug is presumably imported near the top of this module
    # (outside this view); the heavy lifting lives there.
    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=revlog,
        full_node=ui.debugflag,
    )
1914 1947
1915 1948
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    # emit one edge per parent; the null second parent is skipped
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        parents = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1934 1967
1935 1968
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # touch the index so the native implementation is fully loaded
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1945 1978
1946 1979
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # running tally of detected problems; returned at the end
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # PyOxidizer builds have no os.__file__; report the executable
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # probe for the optional Rust extensions
    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # verify the compiled extensions actually import; any failure is
        # reported as an install problem
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # a missing default 'vi' is reported differently from a missing
    # user-configured editor
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let loaded extensions contribute their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2245 2278
2246 2279
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(node_id) for node_id in ids])
    # print one '1' (known) or '0' (unknown) character per queried id
    ui.write(b"%s\n" % b"".join(b"1" if f else b"0" for f in flags))
2260 2293
2261 2294
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; the real implementation is debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2266 2299
2267 2300
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: just delete the lock files and exit immediately
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire (non-blocking) and hold until
    # interrupted; the finally clause guarantees release
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    # default mode: report the current state of both locks
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody else held it; release right away
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2390 2423
2391 2424
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # fetch the fulltext cache from the manifest storage, aborting if
        # the active revlog implementation has none
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # no --clear/--add: dump the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2465 2498
2466 2499
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default template rendering the nested commits/files/extras data
        # produced by the formatter calls below
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged (local/other), with optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records; the fields emitted depend on the record type
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras attached to files that have no merge record of their own
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2574 2607
2575 2608
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # since we previously only listed open branches, the 'branches'
    # namespace is handled specially below instead of via listnames()
    for namespace_name, namespace in repo.names.items():
        if namespace_name != b'branches':
            candidates.update(namespace.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    prefixes = args if args else [b'']
    completions = set()
    for prefix in prefixes:
        for candidate in candidates:
            if candidate.startswith(prefix):
                completions.add(candidate)
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2598 2631
2599 2632
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # serialize a fresh nodemap from the in-memory index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # rust/native index can produce the persistent blob directly
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # emit the raw persisted nodemap bytes, if any exist on disk
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # validate the on-disk data against the current index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # print the docket (metadata header) of the persisted nodemap
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2661 2694
2662 2695
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # parse a full-length hex node id, raising InputError otherwise
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by index and return early
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a new marker inside a transaction
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2812 2845
2813 2846
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # p1copies() maps destination -> source; print as "source -> dest".
    copymap = ctx.p1copies()
    for dest, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2826 2859
2827 2860
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # p2copies() maps destination -> source; print as "source -> dest".
    copymap = ctx.p2copies()
    for dest, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2840 2873
2841 2874
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec; `acceptable` is a
        # bytestring of dirstate state characters to match against.
        dirstate = repo.dirstate
        abspath = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if abspath != repo.root and not abspath.startswith(rootdir):
            return [], []
        if os.path.isdir(abspath):
            abspath += b'/'
        relpath = abspath[len(rootdir):]
        # dirstate paths always use b'/'; translate when the OS separator
        # differs (e.g. Windows).
        sepfix = pycompat.ossep != b'/'
        if sepfix:
            relpath = relpath.replace(pycompat.ossep, b'/')
        prefixlen = len(relpath)
        wantfull = opts['full']
        matchedfiles = set()
        matcheddirs = set()
        for tracked, entry in dirstate.items():
            if not (tracked.startswith(relpath) and entry.state in acceptable):
                continue
            if sepfix:
                tracked = tracked.replace(b'/', pycompat.ossep)
            if wantfull:
                matchedfiles.add(tracked)
                continue
            # Without --full, stop the completion at the next path segment.
            sep = tracked.find(pycompat.ossep, prefixlen)
            if sep >= 0:
                matcheddirs.add(tracked[:sep])
            else:
                matchedfiles.add(tracked)
        return matchedfiles, matcheddirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files = set()
    dirs = set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2910 2943
2911 2944
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    src_ctx = scmutil.revsingle(repo, rev1)
    dst_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(src_ctx, pats, opts)
    # pathcopies() maps destination -> source; print sorted by destination.
    copymap = copies.pathcopies(src_ctx, dst_ctx, matcher)
    for dest, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2925 2958
2926 2959
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always turned on here, but its output is
    # only shown when --debug is in effect.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        peer.close()
2950 2983
2951 2984
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # Suppress the tool-selection chatter unless --debug is given.
            if ui.debugflag:
                quiet = util.nullcontextmanager()
            else:
                quiet = ui.silent(error=True)
            with quiet:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3036 3069
3037 3070
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # List mode: dump every key/value pair in the namespace.
            for key, value in sorted(peer.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
            return

        # Update mode: conditionally set `key` from `old` to `new`.
        key, old, new = keyinfo
        with peer.commandexecutor() as e:
            r = e.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(r) + b'\n')
        return not r
    finally:
        peer.close()
3073 3106
3074 3107
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors (pvec) of two revisions

    Prints both vectors, the relation between them (``=``, ``>``, ``<``
    or ``|``), their depths, and the delta/hamming/distance metrics.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # The four comparisons above should be exhaustive, but previously
        # `rel` was left unbound here, crashing with NameError on the
        # write below. Fall back to an explicit "unknown" marker instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3101 3134
3102 3135
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; see command doc for what
        # --minimal restricts this to.
        changedfiles = None
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            onlymanifest = inmanifest - indirstate
            onlydirstate = indirstate - inmanifest
            notadded = {
                f for f in onlydirstate if not dirstate.get_entry(f).added
            }
            changedfiles = onlymanifest | notadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3150 3183
3151 3184
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    opts = pycompat.byteskwargs(opts)
    only_data = opts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3168 3201
3169 3202
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a false value.
        origin = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not origin:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, origin[0], hex(origin[1]))
            )
3189 3222
3190 3223
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3196 3229
3197 3230
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With --dump, emit one raw index row per revision and exit.
    Otherwise, aggregate and print statistics about delta chains,
    snapshot depths, chunk compression types and revision sizes.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # Raw dump mode: one line per revision, no aggregation.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in range(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": the revision is its own base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the current set of head revisions incrementally.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Revlog format version and feature flags from the index header.
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each accumulator is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into the [min, max, total] accumulator `l` in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in range(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot (no delta parent): depth-0 snapshot or empty text.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta revision: classify against prev, p1, p2 or "other".
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the stored chunk identifies the compression type.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # From here on, index 2 of each accumulator becomes an average.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Column-width helpers so all numbers line up under the widest value.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Plain decimal format sized to fit `max`.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # Decimal-plus-percentage format sized to fit `max`.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total); 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render the chunk-type label; printable ASCII types also get shown
        # verbatim next to their hex value.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3553 3586
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    Two output layouts are supported via -f/--format (0 or 1); each has
    a compact variant and a wider --verbose variant.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full node hashes with --debug, abbreviated ones otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the header matching the chosen format/verbosity combination.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            # Format 0 shows parents as node hashes; fall back to null
            # parents if the lookup fails.
            try:
                pp = r.parents(node)
            except Exception:
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 shows parents as revision numbers and adds flags.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3667 3700
3668 3701
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Ordered transformation pipeline; each stage consumes the previous
    # stage's tree.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Decide which stage trees get printed, and whether unconditionally
    # or only when the tree changed from the previously printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree by stage name.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and optimized trees and diff the
        # resulting revision lists; return 1 when they disagree.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3800 3833
3801 3834
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio transport is implemented by this command.
    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # The two I/O logging destinations are mutually exclusive.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Serve the repo over this process' stdin/stdout, mirroring all wire
    # traffic to the log handle selected above (if any).
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3850 3883
3851 3884
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions to nodes; rev2 defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3879 3912
3880 3913
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision;
    # the storage to inspect is implied by the flag.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # was b'debugdata' (copy-paste); report this command's name so
            # the usage message shown to the user is correct
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap storage objects down to the underlying revlog when present.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for a stable, readable listing.
        sidedata = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3907 3940
3908 3941
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only schemes with a well-known TLS-capable port are supported.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12.
    # Build an equivalent unverified client-side context instead: we only
    # want the peer's raw certificate here, not a validated connection, so
    # hostname checking and verification are deliberately disabled.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE

    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # Fetch the peer certificate in DER form for the Windows API.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass: only check whether the chain is already complete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second pass: allow Windows Update to fetch missing links.
            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3980 4013
3981 4014
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the strip backups, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize bundle/force so the shared incoming machinery below
    # behaves as if called without those options.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # standard log options --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the node is already present locally.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Skip bundles whose parents are missing from the local repo.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Compute the changesets this bundle would add, quietly.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle that contains the wanted node,
                # then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: header with the backup's mtime, then either
                # just the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
4122 4155
4123 4156
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """dump the subrepository state of a revision, one path per entry"""
    ctx = scmutil.revsingle(repo, rev, None)
    # Emit entries in deterministic (sorted-by-path) order.
    for path in sorted(ctx.substate):
        state = ctx.substate[path]
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4135 4168
4136 4169
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interpreter namespace with the objects a debugging session
    # is most likely to need.
    local_namespace = {'ui': ui, 'repo': repo}
    code.interact(local=local_namespace)
4152 4185
4153 4186
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() invocations so that work done for one
    # revision is reused for the next.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        successor_sets = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for succsset in successor_sets:
            # One indented line per successors set; every node is preceded
            # by a single space.
            for node in succsset:
                ui.write(b' ')
                ui.write(short(node))
            ui.write(b'\n')
4208 4241
4209 4242
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        # Three cases: no cache entry at all, a falsy (corrupt) entry, or
        # a real .hgtags filenode.
        if fnode is None:
            display = b'missing'
        elif not fnode:
            display = b'invalid'
        else:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4228 4261
4229 4262
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository to resolve the revisions against.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    # 'ui' is reserved and an empty key is rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree when expansion
        # actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the -D properties only.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4293 4326
4294 4327
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() can come back with None; substitute a placeholder so the
    # output line is always well-formed.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4309 4342
4310 4343
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the prompt implementation returned, verbatim.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4323 4356
4324 4357
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock while caches are
    # (re)generated.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4330 4363
4331 4364
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All the actual work lives in the upgrade module; requested
    # optimizations are deduplicated before delegating.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4381 4414
4382 4415
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    wctx = repo[None]
    m = scmutil.match(wctx, pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(wctx.walk(m))
    if not items:
        return
    # Display paths with '/' separators when ui.slash is set on platforms
    # with a different native separator.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Size the columns to the longest repo-relative and cwd-relative paths.
    # (loop variable renamed from `abs`, which shadowed the builtin)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(abspath) for abspath in items),
        max(len(repo.pathto(abspath)) for abspath in items),
    )
    for abspath in items:
        line = fmt % (
            abspath,
            display(repo.pathto(abspath)),
            b'exact' if m.exact(abspath) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4409 4442
4410 4443
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent node as "<hex> (<phase>)", joined by
            # single spaces, with a trailing space before the reason.
            formatted = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(formatted) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4428 4461
4429 4462
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    """exercise argument passing over the wire protocol"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only command-specific ones remain.
        for opt in cmdutil.remoteopts:
            opts.pop(opt[1])
        # Forward only the options that were actually set.
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # Issue the command twice to verify that the first call does not
        # corrupt the stream for the next one.
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4460 4493
4461 4494
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini language into (action, lines) pairs.

    ``fh`` yields bytes lines. An unindented line starts a new block whose
    text is the action; indented lines form the block body. Blank lines and
    ``#`` comments are skipped. A line indented deeper than the previous
    body line is treated as a continuation and concatenated onto it.
    """
    action = None
    body = []
    previndent = 0

    for rawline in fh:
        line = rawline.rstrip()
        # Skip blank lines and comments.
        if not line or line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # Unindented: a new block begins; emit the finished one first.
            if action:
                yield action, body
            action, body, previndent = line, [], 0
            continue

        # From here on the line is indented and must belong to a block.
        if not action:
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(line) - len(line.lstrip())
        if indent > previndent and body:
            # Deeper indent: continuation of the previous body line.
            body[-1] += line.lstrip()
        else:
            body.append(line)
            previndent = indent

    # Emit the trailing block, if any.
    if action:
        yield action, body
4502 4535
4503 4536
4504 4537 @command(
4505 4538 b'debugwireproto',
4506 4539 [
4507 4540 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4508 4541 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4509 4542 (
4510 4543 b'',
4511 4544 b'noreadstderr',
4512 4545 False,
4513 4546 _(b'do not read from stderr of the remote'),
4514 4547 ),
4515 4548 (
4516 4549 b'',
4517 4550 b'nologhandshake',
4518 4551 False,
4519 4552 _(b'do not log I/O related to the peer handshake'),
4520 4553 ),
4521 4554 ]
4522 4555 + cmdutil.remoteopts,
4523 4556 _(b'[PATH]'),
4524 4557 optionalrepo=True,
4525 4558 )
4526 4559 def debugwireproto(ui, repo, path=None, **opts):
4527 4560 """send wire protocol commands to a server
4528 4561
4529 4562 This command can be used to issue wire protocol commands to remote
4530 4563 peers and to debug the raw data being exchanged.
4531 4564
4532 4565 ``--localssh`` will start an SSH server against the current repository
4533 4566 and connect to that. By default, the connection will perform a handshake
4534 4567 and establish an appropriate peer instance.
4535 4568
4536 4569 ``--peer`` can be used to bypass the handshake protocol and construct a
4537 4570 peer instance using the specified class type. Valid values are ``raw``,
4538 4571 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4539 4572 don't support higher-level command actions.
4540 4573
4541 4574 ``--noreadstderr`` can be used to disable automatic reading from stderr
4542 4575 of the peer (for SSH connections only). Disabling automatic reading of
4543 4576 stderr is useful for making output more deterministic.
4544 4577
4545 4578 Commands are issued via a mini language which is specified via stdin.
4546 4579 The language consists of individual actions to perform. An action is
4547 4580 defined by a block. A block is defined as a line with no leading
4548 4581 space followed by 0 or more lines with leading space. Blocks are
4549 4582 effectively a high-level command with additional metadata.
4550 4583
4551 4584 Lines beginning with ``#`` are ignored.
4552 4585
4553 4586 The following sections denote available actions.
4554 4587
4555 4588 raw
4556 4589 ---
4557 4590
4558 4591 Send raw data to the server.
4559 4592
4560 4593 The block payload contains the raw data to send as one atomic send
4561 4594 operation. The data may not actually be delivered in a single system
4562 4595 call: it depends on the abilities of the transport being used.
4563 4596
4564 4597 Each line in the block is de-indented and concatenated. Then, that
4565 4598 value is evaluated as a Python b'' literal. This allows the use of
4566 4599 backslash escaping, etc.
4567 4600
4568 4601 raw+
4569 4602 ----
4570 4603
4571 4604 Behaves like ``raw`` except flushes output afterwards.
4572 4605
4573 4606 command <X>
4574 4607 -----------
4575 4608
4576 4609 Send a request to run a named command, whose name follows the ``command``
4577 4610 string.
4578 4611
4579 4612 Arguments to the command are defined as lines in this block. The format of
4580 4613 each line is ``<key> <value>``. e.g.::
4581 4614
4582 4615 command listkeys
4583 4616 namespace bookmarks
4584 4617
4585 4618 If the value begins with ``eval:``, it will be interpreted as a Python
4586 4619 literal expression. Otherwise values are interpreted as Python b'' literals.
4587 4620 This allows sending complex types and encoding special byte sequences via
4588 4621 backslash escaping.
4589 4622
4590 4623 The following arguments have special meaning:
4591 4624
4592 4625 ``PUSHFILE``
4593 4626 When defined, the *push* mechanism of the peer will be used instead
4594 4627 of the static request-response mechanism and the content of the
4595 4628 file specified in the value of this argument will be sent as the
4596 4629 command payload.
4597 4630
4598 4631 This can be used to submit a local bundle file to the remote.
4599 4632
4600 4633 batchbegin
4601 4634 ----------
4602 4635
4603 4636 Instruct the peer to begin a batched send.
4604 4637
4605 4638 All ``command`` blocks are queued for execution until the next
4606 4639 ``batchsubmit`` block.
4607 4640
4608 4641 batchsubmit
4609 4642 -----------
4610 4643
4611 4644 Submit previously queued ``command`` blocks as a batch request.
4612 4645
4613 4646 This action MUST be paired with a ``batchbegin`` action.
4614 4647
4615 4648 httprequest <method> <path>
4616 4649 ---------------------------
4617 4650
4618 4651 (HTTP peer only)
4619 4652
4620 4653 Send an HTTP request to the peer.
4621 4654
4622 4655 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4623 4656
4624 4657 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4625 4658 headers to add to the request. e.g. ``Accept: foo``.
4626 4659
4627 4660 The following arguments are special:
4628 4661
4629 4662 ``BODYFILE``
4630 4663 The content of the file defined as the value to this argument will be
4631 4664 transferred verbatim as the HTTP request body.
4632 4665
4633 4666 ``frame <type> <flags> <payload>``
4634 4667 Send a unified protocol frame as part of the request body.
4635 4668
4636 4669 All frames will be collected and sent as the body to the HTTP
4637 4670 request.
4638 4671
4639 4672 close
4640 4673 -----
4641 4674
4642 4675 Close the connection to the server.
4643 4676
4644 4677 flush
4645 4678 -----
4646 4679
4647 4680 Flush data written to the server.
4648 4681
4649 4682 readavailable
4650 4683 -------------
4651 4684
4652 4685 Close the write end of the connection and read all available data from
4653 4686 the server.
4654 4687
4655 4688 If the connection to the server encompasses multiple pipes, we poll both
4656 4689 pipes and read available data.
4657 4690
4658 4691 readline
4659 4692 --------
4660 4693
4661 4694 Read a line of output from the server. If there are multiple output
4662 4695 pipes, reads only the main pipe.
4663 4696
4664 4697 ereadline
4665 4698 ---------
4666 4699
4667 4700 Like ``readline``, but read from the stderr pipe, if available.
4668 4701
4669 4702 read <X>
4670 4703 --------
4671 4704
4672 4705 ``read()`` N bytes from the server's main output pipe.
4673 4706
4674 4707 eread <X>
4675 4708 ---------
4676 4709
4677 4710 ``read()`` N bytes from the server's stderr pipe, if available.
4678 4711
4679 4712 Specifying Unified Frame-Based Protocol Frames
4680 4713 ----------------------------------------------
4681 4714
4682 4715 It is possible to emit a *Unified Frame-Based Protocol* by using special
4683 4716 syntax.
4684 4717
4685 4718 A frame is composed as a type, flags, and payload. These can be parsed
4686 4719 from a string of the form:
4687 4720
4688 4721 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4689 4722
4690 4723 ``request-id`` and ``stream-id`` are integers defining the request and
4691 4724 stream identifiers.
4692 4725
4693 4726 ``type`` can be an integer value for the frame type or the string name
4694 4727 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4695 4728 ``command-name``.
4696 4729
4697 4730 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4698 4731 components. Each component (and there can be just one) can be an integer
4699 4732 or a flag name for stream flags or frame flags, respectively. Values are
4700 4733 resolved to integers and then bitwise OR'd together.
4701 4734
4702 4735 ``payload`` represents the raw frame payload. If it begins with
4703 4736 ``cbor:``, the following string is evaluated as Python code and the
4704 4737 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4705 4738 as a Python byte string literal.
4706 4739 """
4707 4740 opts = pycompat.byteskwargs(opts)
4708 4741
4709 4742 if opts[b'localssh'] and not repo:
4710 4743 raise error.Abort(_(b'--localssh requires a repository'))
4711 4744
4712 4745 if opts[b'peer'] and opts[b'peer'] not in (
4713 4746 b'raw',
4714 4747 b'ssh1',
4715 4748 ):
4716 4749 raise error.Abort(
4717 4750 _(b'invalid value for --peer'),
4718 4751 hint=_(b'valid values are "raw" and "ssh1"'),
4719 4752 )
4720 4753
4721 4754 if path and opts[b'localssh']:
4722 4755 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4723 4756
4724 4757 if ui.interactive():
4725 4758 ui.write(_(b'(waiting for commands on stdin)\n'))
4726 4759
4727 4760 blocks = list(_parsewirelangblocks(ui.fin))
4728 4761
4729 4762 proc = None
4730 4763 stdin = None
4731 4764 stdout = None
4732 4765 stderr = None
4733 4766 opener = None
4734 4767
4735 4768 if opts[b'localssh']:
4736 4769 # We start the SSH server in its own process so there is process
4737 4770 # separation. This prevents a whole class of potential bugs around
4738 4771 # shared state from interfering with server operation.
4739 4772 args = procutil.hgcmd() + [
4740 4773 b'-R',
4741 4774 repo.root,
4742 4775 b'debugserve',
4743 4776 b'--sshstdio',
4744 4777 ]
4745 4778 proc = subprocess.Popen(
4746 4779 pycompat.rapply(procutil.tonativestr, args),
4747 4780 stdin=subprocess.PIPE,
4748 4781 stdout=subprocess.PIPE,
4749 4782 stderr=subprocess.PIPE,
4750 4783 bufsize=0,
4751 4784 )
4752 4785
4753 4786 stdin = proc.stdin
4754 4787 stdout = proc.stdout
4755 4788 stderr = proc.stderr
4756 4789
4757 4790 # We turn the pipes into observers so we can log I/O.
4758 4791 if ui.verbose or opts[b'peer'] == b'raw':
4759 4792 stdin = util.makeloggingfileobject(
4760 4793 ui, proc.stdin, b'i', logdata=True
4761 4794 )
4762 4795 stdout = util.makeloggingfileobject(
4763 4796 ui, proc.stdout, b'o', logdata=True
4764 4797 )
4765 4798 stderr = util.makeloggingfileobject(
4766 4799 ui, proc.stderr, b'e', logdata=True
4767 4800 )
4768 4801
4769 4802 # --localssh also implies the peer connection settings.
4770 4803
4771 4804 url = b'ssh://localserver'
4772 4805 autoreadstderr = not opts[b'noreadstderr']
4773 4806
4774 4807 if opts[b'peer'] == b'ssh1':
4775 4808 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4776 4809 peer = sshpeer.sshv1peer(
4777 4810 ui,
4778 4811 url,
4779 4812 proc,
4780 4813 stdin,
4781 4814 stdout,
4782 4815 stderr,
4783 4816 None,
4784 4817 autoreadstderr=autoreadstderr,
4785 4818 )
4786 4819 elif opts[b'peer'] == b'raw':
4787 4820 ui.write(_(b'using raw connection to peer\n'))
4788 4821 peer = None
4789 4822 else:
4790 4823 ui.write(_(b'creating ssh peer from handshake results\n'))
4791 4824 peer = sshpeer.makepeer(
4792 4825 ui,
4793 4826 url,
4794 4827 proc,
4795 4828 stdin,
4796 4829 stdout,
4797 4830 stderr,
4798 4831 autoreadstderr=autoreadstderr,
4799 4832 )
4800 4833
4801 4834 elif path:
4802 4835 # We bypass hg.peer() so we can proxy the sockets.
4803 4836 # TODO consider not doing this because we skip
4804 4837 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4805 4838 u = urlutil.url(path)
4806 4839 if u.scheme != b'http':
4807 4840 raise error.Abort(_(b'only http:// paths are currently supported'))
4808 4841
4809 4842 url, authinfo = u.authinfo()
4810 4843 openerargs = {
4811 4844 'useragent': b'Mercurial debugwireproto',
4812 4845 }
4813 4846
4814 4847 # Turn pipes/sockets into observers so we can log I/O.
4815 4848 if ui.verbose:
4816 4849 openerargs.update(
4817 4850 {
4818 4851 'loggingfh': ui,
4819 4852 'loggingname': b's',
4820 4853 'loggingopts': {
4821 4854 'logdata': True,
4822 4855 'logdataapis': False,
4823 4856 },
4824 4857 }
4825 4858 )
4826 4859
4827 4860 if ui.debugflag:
4828 4861 openerargs['loggingopts']['logdataapis'] = True
4829 4862
4830 4863 # Don't send default headers when in raw mode. This allows us to
4831 4864 # bypass most of the behavior of our URL handling code so we can
4832 4865 # have near complete control over what's sent on the wire.
4833 4866 if opts[b'peer'] == b'raw':
4834 4867 openerargs['sendaccept'] = False
4835 4868
4836 4869 opener = urlmod.opener(ui, authinfo, **openerargs)
4837 4870
4838 4871 if opts[b'peer'] == b'raw':
4839 4872 ui.write(_(b'using raw connection to peer\n'))
4840 4873 peer = None
4841 4874 elif opts[b'peer']:
4842 4875 raise error.Abort(
4843 4876 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4844 4877 )
4845 4878 else:
4846 4879 peer = httppeer.makepeer(ui, path, opener=opener)
4847 4880
4848 4881 # We /could/ populate stdin/stdout with sock.makefile()...
4849 4882 else:
4850 4883 raise error.Abort(_(b'unsupported connection configuration'))
4851 4884
4852 4885 batchedcommands = None
4853 4886
4854 4887 # Now perform actions based on the parsed wire language instructions.
4855 4888 for action, lines in blocks:
4856 4889 if action in (b'raw', b'raw+'):
4857 4890 if not stdin:
4858 4891 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4859 4892
4860 4893 # Concatenate the data together.
4861 4894 data = b''.join(l.lstrip() for l in lines)
4862 4895 data = stringutil.unescapestr(data)
4863 4896 stdin.write(data)
4864 4897
4865 4898 if action == b'raw+':
4866 4899 stdin.flush()
4867 4900 elif action == b'flush':
4868 4901 if not stdin:
4869 4902 raise error.Abort(_(b'cannot call flush on this peer'))
4870 4903 stdin.flush()
4871 4904 elif action.startswith(b'command'):
4872 4905 if not peer:
4873 4906 raise error.Abort(
4874 4907 _(
4875 4908 b'cannot send commands unless peer instance '
4876 4909 b'is available'
4877 4910 )
4878 4911 )
4879 4912
4880 4913 command = action.split(b' ', 1)[1]
4881 4914
4882 4915 args = {}
4883 4916 for line in lines:
4884 4917 # We need to allow empty values.
4885 4918 fields = line.lstrip().split(b' ', 1)
4886 4919 if len(fields) == 1:
4887 4920 key = fields[0]
4888 4921 value = b''
4889 4922 else:
4890 4923 key, value = fields
4891 4924
4892 4925 if value.startswith(b'eval:'):
4893 4926 value = stringutil.evalpythonliteral(value[5:])
4894 4927 else:
4895 4928 value = stringutil.unescapestr(value)
4896 4929
4897 4930 args[key] = value
4898 4931
4899 4932 if batchedcommands is not None:
4900 4933 batchedcommands.append((command, args))
4901 4934 continue
4902 4935
4903 4936 ui.status(_(b'sending %s command\n') % command)
4904 4937
4905 4938 if b'PUSHFILE' in args:
4906 4939 with open(args[b'PUSHFILE'], 'rb') as fh:
4907 4940 del args[b'PUSHFILE']
4908 4941 res, output = peer._callpush(
4909 4942 command, fh, **pycompat.strkwargs(args)
4910 4943 )
4911 4944 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4912 4945 ui.status(
4913 4946 _(b'remote output: %s\n') % stringutil.escapestr(output)
4914 4947 )
4915 4948 else:
4916 4949 with peer.commandexecutor() as e:
4917 4950 res = e.callcommand(command, args).result()
4918 4951
4919 4952 ui.status(
4920 4953 _(b'response: %s\n')
4921 4954 % stringutil.pprint(res, bprefix=True, indent=2)
4922 4955 )
4923 4956
4924 4957 elif action == b'batchbegin':
4925 4958 if batchedcommands is not None:
4926 4959 raise error.Abort(_(b'nested batchbegin not allowed'))
4927 4960
4928 4961 batchedcommands = []
4929 4962 elif action == b'batchsubmit':
4930 4963 # There is a batching API we could go through. But it would be
4931 4964 # difficult to normalize requests into function calls. It is easier
4932 4965 # to bypass this layer and normalize to commands + args.
4933 4966 ui.status(
4934 4967 _(b'sending batch with %d sub-commands\n')
4935 4968 % len(batchedcommands)
4936 4969 )
4937 4970 assert peer is not None
4938 4971 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4939 4972 ui.status(
4940 4973 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4941 4974 )
4942 4975
4943 4976 batchedcommands = None
4944 4977
4945 4978 elif action.startswith(b'httprequest '):
4946 4979 if not opener:
4947 4980 raise error.Abort(
4948 4981 _(b'cannot use httprequest without an HTTP peer')
4949 4982 )
4950 4983
4951 4984 request = action.split(b' ', 2)
4952 4985 if len(request) != 3:
4953 4986 raise error.Abort(
4954 4987 _(
4955 4988 b'invalid httprequest: expected format is '
4956 4989 b'"httprequest <method> <path>'
4957 4990 )
4958 4991 )
4959 4992
4960 4993 method, httppath = request[1:]
4961 4994 headers = {}
4962 4995 body = None
4963 4996 frames = []
4964 4997 for line in lines:
4965 4998 line = line.lstrip()
4966 4999 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4967 5000 if m:
4968 5001 # Headers need to use native strings.
4969 5002 key = pycompat.strurl(m.group(1))
4970 5003 value = pycompat.strurl(m.group(2))
4971 5004 headers[key] = value
4972 5005 continue
4973 5006
4974 5007 if line.startswith(b'BODYFILE '):
4975 5008 with open(line.split(b' ', 1), b'rb') as fh:
4976 5009 body = fh.read()
4977 5010 elif line.startswith(b'frame '):
4978 5011 frame = wireprotoframing.makeframefromhumanstring(
4979 5012 line[len(b'frame ') :]
4980 5013 )
4981 5014
4982 5015 frames.append(frame)
4983 5016 else:
4984 5017 raise error.Abort(
4985 5018 _(b'unknown argument to httprequest: %s') % line
4986 5019 )
4987 5020
4988 5021 url = path + httppath
4989 5022
4990 5023 if frames:
4991 5024 body = b''.join(bytes(f) for f in frames)
4992 5025
4993 5026 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4994 5027
4995 5028 # urllib.Request insists on using has_data() as a proxy for
4996 5029 # determining the request method. Override that to use our
4997 5030 # explicitly requested method.
4998 5031 req.get_method = lambda: pycompat.sysstr(method)
4999 5032
5000 5033 try:
5001 5034 res = opener.open(req)
5002 5035 body = res.read()
5003 5036 except util.urlerr.urlerror as e:
5004 5037 # read() method must be called, but only exists in Python 2
5005 5038 getattr(e, 'read', lambda: None)()
5006 5039 continue
5007 5040
5008 5041 ct = res.headers.get('Content-Type')
5009 5042 if ct == 'application/mercurial-cbor':
5010 5043 ui.write(
5011 5044 _(b'cbor> %s\n')
5012 5045 % stringutil.pprint(
5013 5046 cborutil.decodeall(body), bprefix=True, indent=2
5014 5047 )
5015 5048 )
5016 5049
5017 5050 elif action == b'close':
5018 5051 assert peer is not None
5019 5052 peer.close()
5020 5053 elif action == b'readavailable':
5021 5054 if not stdout or not stderr:
5022 5055 raise error.Abort(
5023 5056 _(b'readavailable not available on this peer')
5024 5057 )
5025 5058
5026 5059 stdin.close()
5027 5060 stdout.read()
5028 5061 stderr.read()
5029 5062
5030 5063 elif action == b'readline':
5031 5064 if not stdout:
5032 5065 raise error.Abort(_(b'readline not available on this peer'))
5033 5066 stdout.readline()
5034 5067 elif action == b'ereadline':
5035 5068 if not stderr:
5036 5069 raise error.Abort(_(b'ereadline not available on this peer'))
5037 5070 stderr.readline()
5038 5071 elif action.startswith(b'read '):
5039 5072 count = int(action.split(b' ', 1)[1])
5040 5073 if not stdout:
5041 5074 raise error.Abort(_(b'read not available on this peer'))
5042 5075 stdout.read(count)
5043 5076 elif action.startswith(b'eread '):
5044 5077 count = int(action.split(b' ', 1)[1])
5045 5078 if not stderr:
5046 5079 raise error.Abort(_(b'eread not available on this peer'))
5047 5080 stderr.read(count)
5048 5081 else:
5049 5082 raise error.Abort(_(b'unknown action: %s') % action)
5050 5083
5051 5084 if batchedcommands is not None:
5052 5085 raise error.Abort(_(b'unclosed "batchbegin" request'))
5053 5086
5054 5087 if peer:
5055 5088 peer.close()
5056 5089
5057 5090 if proc:
5058 5091 proc.kill()
@@ -1,449 +1,449 b''
1 1 Show all commands except debug commands
2 2 $ hg debugcomplete
3 3 abort
4 4 add
5 5 addremove
6 6 annotate
7 7 archive
8 8 backout
9 9 bisect
10 10 bookmarks
11 11 branch
12 12 branches
13 13 bundle
14 14 cat
15 15 clone
16 16 commit
17 17 config
18 18 continue
19 19 copy
20 20 diff
21 21 export
22 22 files
23 23 forget
24 24 graft
25 25 grep
26 26 heads
27 27 help
28 28 identify
29 29 import
30 30 incoming
31 31 init
32 32 locate
33 33 log
34 34 manifest
35 35 merge
36 36 outgoing
37 37 parents
38 38 paths
39 39 phase
40 40 pull
41 41 purge
42 42 push
43 43 recover
44 44 remove
45 45 rename
46 46 resolve
47 47 revert
48 48 rollback
49 49 root
50 50 serve
51 51 shelve
52 52 status
53 53 summary
54 54 tag
55 55 tags
56 56 tip
57 57 unbundle
58 58 unshelve
59 59 update
60 60 verify
61 61 version
62 62
63 63 Show all commands that start with "a"
64 64 $ hg debugcomplete a
65 65 abort
66 66 add
67 67 addremove
68 68 annotate
69 69 archive
70 70
71 71 Do not show debug commands if there are other candidates
72 72 $ hg debugcomplete d
73 73 diff
74 74
75 75 Show debug commands if there are no other candidates
76 76 $ hg debugcomplete debug
77 77 debug-delta-find
78 78 debug-repair-issue6528
79 79 debug-revlog-index
80 80 debugancestor
81 81 debugantivirusrunning
82 82 debugapplystreamclonebundle
83 83 debugbackupbundle
84 84 debugbuilddag
85 85 debugbundle
86 86 debugcapabilities
87 87 debugchangedfiles
88 88 debugcheckstate
89 89 debugcolor
90 90 debugcommands
91 91 debugcomplete
92 92 debugconfig
93 93 debugcreatestreamclonebundle
94 94 debugdag
95 95 debugdata
96 96 debugdate
97 97 debugdeltachain
98 98 debugdirstate
99 99 debugdirstateignorepatternshash
100 100 debugdiscovery
101 101 debugdownload
102 102 debugextensions
103 103 debugfileset
104 104 debugformat
105 105 debugfsinfo
106 106 debuggetbundle
107 107 debugignore
108 108 debugindexdot
109 109 debugindexstats
110 110 debuginstall
111 111 debugknown
112 112 debuglabelcomplete
113 113 debuglocks
114 114 debugmanifestfulltextcache
115 115 debugmergestate
116 116 debugnamecomplete
117 117 debugnodemap
118 118 debugobsolete
119 119 debugp1copies
120 120 debugp2copies
121 121 debugpathcomplete
122 122 debugpathcopies
123 123 debugpeer
124 124 debugpickmergetool
125 125 debugpushkey
126 126 debugpvec
127 127 debugrebuilddirstate
128 128 debugrebuildfncache
129 129 debugrename
130 130 debugrequires
131 131 debugrevlog
132 132 debugrevlogindex
133 133 debugrevspec
134 134 debugserve
135 135 debugsetparents
136 136 debugshell
137 137 debugsidedata
138 138 debugssl
139 139 debugstrip
140 140 debugsub
141 141 debugsuccessorssets
142 142 debugtagscache
143 143 debugtemplate
144 144 debuguigetpass
145 145 debuguiprompt
146 146 debugupdatecaches
147 147 debugupgraderepo
148 148 debugwalk
149 149 debugwhyunstable
150 150 debugwireargs
151 151 debugwireproto
152 152
153 153 Do not show the alias of a debug command if there are other candidates
154 154 (this should hide rawcommit)
155 155 $ hg debugcomplete r
156 156 recover
157 157 remove
158 158 rename
159 159 resolve
160 160 revert
161 161 rollback
162 162 root
163 163 Show the alias of a debug command if there are no other candidates
164 164 $ hg debugcomplete rawc
165 165
166 166
167 167 Show the global options
168 168 $ hg debugcomplete --options | sort
169 169 --color
170 170 --config
171 171 --cwd
172 172 --debug
173 173 --debugger
174 174 --encoding
175 175 --encodingmode
176 176 --help
177 177 --hidden
178 178 --noninteractive
179 179 --pager
180 180 --profile
181 181 --quiet
182 182 --repository
183 183 --time
184 184 --traceback
185 185 --verbose
186 186 --version
187 187 -R
188 188 -h
189 189 -q
190 190 -v
191 191 -y
192 192
193 193 Show the options for the "serve" command
194 194 $ hg debugcomplete --options serve | sort
195 195 --accesslog
196 196 --address
197 197 --certificate
198 198 --cmdserver
199 199 --color
200 200 --config
201 201 --cwd
202 202 --daemon
203 203 --daemon-postexec
204 204 --debug
205 205 --debugger
206 206 --encoding
207 207 --encodingmode
208 208 --errorlog
209 209 --help
210 210 --hidden
211 211 --ipv6
212 212 --name
213 213 --noninteractive
214 214 --pager
215 215 --pid-file
216 216 --port
217 217 --prefix
218 218 --print-url
219 219 --profile
220 220 --quiet
221 221 --repository
222 222 --stdio
223 223 --style
224 224 --subrepos
225 225 --templates
226 226 --time
227 227 --traceback
228 228 --verbose
229 229 --version
230 230 --web-conf
231 231 -6
232 232 -A
233 233 -E
234 234 -R
235 235 -S
236 236 -a
237 237 -d
238 238 -h
239 239 -n
240 240 -p
241 241 -q
242 242 -t
243 243 -v
244 244 -y
245 245
246 246 Show an error if we use --options with an ambiguous abbreviation
247 247 $ hg debugcomplete --options s
248 248 hg: command 's' is ambiguous:
249 249 serve shelve showconfig status summary
250 250 [10]
251 251
252 252 Show all commands + options
253 253 $ hg debugcommands
254 254 abort: dry-run
255 255 add: include, exclude, subrepos, dry-run
256 256 addremove: similarity, subrepos, include, exclude, dry-run
257 257 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
258 258 archive: no-decode, prefix, rev, type, subrepos, include, exclude
259 259 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
260 260 bisect: reset, good, bad, skip, extend, command, noupdate
261 261 bookmarks: force, rev, delete, rename, inactive, list, template
262 262 branch: force, clean, rev
263 263 branches: active, closed, rev, template
264 264 bundle: exact, force, rev, branch, base, all, type, ssh, remotecmd, insecure
265 265 cat: output, rev, decode, include, exclude, template
266 266 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
267 267 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
268 268 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
269 269 continue: dry-run
270 270 copy: forget, after, at-rev, force, include, exclude, dry-run
271 debug-delta-find: changelog, manifest, dir, template
271 debug-delta-find: changelog, manifest, dir, template, source
272 272 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
273 273 debug-revlog-index: changelog, manifest, dir, template
274 274 debugancestor:
275 275 debugantivirusrunning:
276 276 debugapplystreamclonebundle:
277 277 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
278 278 debugbuilddag: mergeable-file, overwritten-file, new-file, from-existing
279 279 debugbundle: all, part-type, spec
280 280 debugcapabilities:
281 281 debugchangedfiles: compute
282 282 debugcheckstate:
283 283 debugcolor: style
284 284 debugcommands:
285 285 debugcomplete: options
286 286 debugcreatestreamclonebundle:
287 287 debugdag: tags, branches, dots, spaces
288 288 debugdata: changelog, manifest, dir
289 289 debugdate: extended
290 290 debugdeltachain: changelog, manifest, dir, template
291 291 debugdirstateignorepatternshash:
292 292 debugdirstate: nodates, dates, datesort, docket, all
293 293 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
294 294 debugdownload: output
295 295 debugextensions: template
296 296 debugfileset: rev, all-files, show-matcher, show-stage
297 297 debugformat: template
298 298 debugfsinfo:
299 299 debuggetbundle: head, common, type
300 300 debugignore:
301 301 debugindexdot: changelog, manifest, dir
302 302 debugindexstats:
303 303 debuginstall: template
304 304 debugknown:
305 305 debuglabelcomplete:
306 306 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
307 307 debugmanifestfulltextcache: clear, add
308 308 debugmergestate: style, template
309 309 debugnamecomplete:
310 310 debugnodemap: dump-new, dump-disk, check, metadata
311 311 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
312 312 debugp1copies: rev
313 313 debugp2copies: rev
314 314 debugpathcomplete: full, normal, added, removed
315 315 debugpathcopies: include, exclude
316 316 debugpeer:
317 317 debugpickmergetool: rev, changedelete, include, exclude, tool
318 318 debugpushkey:
319 319 debugpvec:
320 320 debugrebuilddirstate: rev, minimal
321 321 debugrebuildfncache: only-data
322 322 debugrename: rev
323 323 debugrequires:
324 324 debugrevlog: changelog, manifest, dir, dump
325 325 debugrevlogindex: changelog, manifest, dir, format
326 326 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
327 327 debugserve: sshstdio, logiofd, logiofile
328 328 debugsetparents:
329 329 debugshell:
330 330 debugsidedata: changelog, manifest, dir
331 331 debugssl:
332 332 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
333 333 debugsub: rev
334 334 debugsuccessorssets: closest
335 335 debugtagscache:
336 336 debugtemplate: rev, define
337 337 debuguigetpass: prompt
338 338 debuguiprompt: prompt
339 339 debugupdatecaches:
340 340 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
341 341 debugwalk: include, exclude
342 342 debugwhyunstable:
343 343 debugwireargs: three, four, five, ssh, remotecmd, insecure
344 344 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
345 345 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
346 346 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
347 347 files: rev, print0, include, exclude, template, subrepos
348 348 forget: interactive, include, exclude, dry-run
349 349 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
350 350 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
351 351 heads: rev, topo, active, closed, style, template
352 352 help: extension, command, keyword, system
353 353 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
354 354 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
355 355 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
356 356 init: ssh, remotecmd, insecure
357 357 locate: rev, print0, fullpath, include, exclude
358 358 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
359 359 manifest: rev, all, template
360 360 merge: force, rev, preview, abort, tool
361 361 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
362 362 parents: rev, style, template
363 363 paths: template
364 364 phase: public, draft, secret, force, rev
365 365 pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
366 366 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
367 367 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
368 368 recover: verify
369 369 remove: after, force, subrepos, include, exclude, dry-run
370 370 rename: forget, after, at-rev, force, include, exclude, dry-run
371 371 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
372 372 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
373 373 rollback: dry-run, force
374 374 root: template
375 375 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
376 376 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
377 377 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
378 378 summary: remote
379 379 tag: force, local, rev, remove, edit, message, date, user
380 380 tags: template
381 381 tip: patch, git, style, template
382 382 unbundle: update
383 383 unshelve: abort, continue, interactive, keep, name, tool, date
384 384 update: clean, check, merge, date, rev, tool
385 385 verify: full
386 386 version: template
387 387
388 388 $ hg init a
389 389 $ cd a
390 390 $ echo fee > fee
391 391 $ hg ci -q -Amfee
392 392 $ hg tag fee
393 393 $ mkdir fie
394 394 $ echo dead > fie/dead
395 395 $ echo live > fie/live
396 396 $ hg bookmark fo
397 397 $ hg branch -q fie
398 398 $ hg ci -q -Amfie
399 399 $ echo fo > fo
400 400 $ hg branch -qf default
401 401 $ hg ci -q -Amfo
402 402 $ echo Fum > Fum
403 403 $ hg ci -q -AmFum
404 404 $ hg bookmark Fum
405 405
406 406 Test debugpathcomplete
407 407
408 408 $ hg debugpathcomplete f
409 409 fee
410 410 fie
411 411 fo
412 412 $ hg debugpathcomplete -f f
413 413 fee
414 414 fie/dead
415 415 fie/live
416 416 fo
417 417
418 418 $ hg rm Fum
419 419 $ hg debugpathcomplete -r F
420 420 Fum
421 421
422 422 Test debugnamecomplete
423 423
424 424 $ hg debugnamecomplete
425 425 Fum
426 426 default
427 427 fee
428 428 fie
429 429 fo
430 430 tip
431 431 $ hg debugnamecomplete f
432 432 fee
433 433 fie
434 434 fo
435 435
436 436 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
437 437 used for completions in some shells.
438 438
439 439 $ hg debuglabelcomplete
440 440 Fum
441 441 default
442 442 fee
443 443 fie
444 444 fo
445 445 tip
446 446 $ hg debuglabelcomplete f
447 447 fee
448 448 fie
449 449 fo
@@ -1,194 +1,346 b''
1 1 ====================================
2 2 Test delta choice with sparse revlog
3 3 ====================================
4 4
5 5 Sparse-revlog usually shows the most gain on Manifest. However, it is simpler
6 6 to generate an appropriate file, so we test with a single file instead. The
7 7 goal is to observe intermediate snapshots being created.
8 8
9 9 We need a large enough file. Part of the content needs to be replaced
10 10 repeatedly while some of it changes rarely.
11 11
12 12 $ bundlepath="$TESTDIR/artifacts/cache/big-file-churn.hg"
13 13
14 14 $ expectedhash=`cat "$bundlepath".md5`
15 15
16 16 #if slow
17 17
18 18 $ if [ ! -f "$bundlepath" ]; then
19 19 > "$TESTDIR"/artifacts/scripts/generate-churning-bundle.py > /dev/null
20 20 > fi
21 21
22 22 #else
23 23
24 24 $ if [ ! -f "$bundlepath" ]; then
25 25 > echo 'skipped: missing artifact, run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
26 26 > exit 80
27 27 > fi
28 28
29 29 #endif
30 30
31 31 $ currenthash=`f -M "$bundlepath" | cut -d = -f 2`
32 32 $ if [ "$currenthash" != "$expectedhash" ]; then
33 33 > echo 'skipped: outdated artifact, md5 "'"$currenthash"'" expected "'"$expectedhash"'" run "'"$TESTDIR"'/artifacts/scripts/generate-churning-bundle.py"'
34 34 > exit 80
35 35 > fi
36 36
37 37 $ cat >> $HGRCPATH << EOF
38 38 > [format]
39 39 > sparse-revlog = yes
40 40 > maxchainlen = 15
41 41 > [storage]
42 42 > revlog.optimize-delta-parent-choice = yes
43 43 > revlog.reuse-external-delta = no
44 44 > EOF
45 45 $ hg init sparse-repo
46 46 $ cd sparse-repo
47 47 $ hg unbundle $bundlepath
48 48 adding changesets
49 49 adding manifests
50 50 adding file changes
51 51 added 5001 changesets with 5001 changes to 1 files (+89 heads)
52 52 new changesets 9706f5af64f4:d9032adc8114 (5001 drafts)
53 53 (run 'hg heads' to see heads, 'hg merge' to merge)
54 54 $ hg up
55 55 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
56 56 updated to "d9032adc8114: commit #5000"
57 57 89 other heads for branch "default"
58 58
59 59 $ hg log --stat -r 0:3
60 60 changeset: 0:9706f5af64f4
61 61 user: test
62 62 date: Thu Jan 01 00:00:00 1970 +0000
63 63 summary: initial commit
64 64
65 65 SPARSE-REVLOG-TEST-FILE | 10500 ++++++++++++++++++++++++++++++++++++++++++++++
66 66 1 files changed, 10500 insertions(+), 0 deletions(-)
67 67
68 68 changeset: 1:724907deaa5e
69 69 user: test
70 70 date: Thu Jan 01 00:00:00 1970 +0000
71 71 summary: commit #1
72 72
73 73 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
74 74 1 files changed, 534 insertions(+), 534 deletions(-)
75 75
76 76 changeset: 2:62c41bce3e5d
77 77 user: test
78 78 date: Thu Jan 01 00:00:00 1970 +0000
79 79 summary: commit #2
80 80
81 81 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
82 82 1 files changed, 534 insertions(+), 534 deletions(-)
83 83
84 84 changeset: 3:348a9cbd6959
85 85 user: test
86 86 date: Thu Jan 01 00:00:00 1970 +0000
87 87 summary: commit #3
88 88
89 89 SPARSE-REVLOG-TEST-FILE | 1068 +++++++++++++++++++++++-----------------------
90 90 1 files changed, 534 insertions(+), 534 deletions(-)
91 91
92 92
93 93 $ f -s .hg/store/data/*.d
94 94 .hg/store/data/_s_p_a_r_s_e-_r_e_v_l_o_g-_t_e_s_t-_f_i_l_e.d: size=58616973
95 95 $ hg debugrevlog *
96 96 format : 1
97 97 flags : generaldelta
98 98
99 99 revisions : 5001
100 100 merges : 625 (12.50%)
101 101 normal : 4376 (87.50%)
102 102 revisions : 5001
103 103 empty : 0 ( 0.00%)
104 104 text : 0 (100.00%)
105 105 delta : 0 (100.00%)
106 106 snapshot : 383 ( 7.66%)
107 107 lvl-0 : 3 ( 0.06%)
108 108 lvl-1 : 18 ( 0.36%)
109 109 lvl-2 : 62 ( 1.24%)
110 110 lvl-3 : 108 ( 2.16%)
111 111 lvl-4 : 191 ( 3.82%)
112 112 lvl-5 : 1 ( 0.02%)
113 113 deltas : 4618 (92.34%)
114 114 revision size : 58616973
115 115 snapshot : 9247844 (15.78%)
116 116 lvl-0 : 539532 ( 0.92%)
117 117 lvl-1 : 1467743 ( 2.50%)
118 118 lvl-2 : 1873820 ( 3.20%)
119 119 lvl-3 : 2326874 ( 3.97%)
120 120 lvl-4 : 3029118 ( 5.17%)
121 121 lvl-5 : 10757 ( 0.02%)
122 122 deltas : 49369129 (84.22%)
123 123
124 124 chunks : 5001
125 125 0x28 : 5001 (100.00%)
126 126 chunks size : 58616973
127 127 0x28 : 58616973 (100.00%)
128 128
129 129 avg chain length : 9
130 130 max chain length : 15
131 131 max chain reach : 27366701
132 132 compression ratio : 29
133 133
134 134 uncompressed data size (min/max/avg) : 346468 / 346472 / 346471
135 135 full revision size (min/max/avg) : 179288 / 180786 / 179844
136 136 inter-snapshot size (min/max/avg) : 10757 / 169507 / 22916
137 137 level-1 (min/max/avg) : 13905 / 169507 / 81541
138 138 level-2 (min/max/avg) : 10887 / 83873 / 30222
139 139 level-3 (min/max/avg) : 10911 / 43047 / 21545
140 140 level-4 (min/max/avg) : 10838 / 21390 / 15859
141 141 level-5 (min/max/avg) : 10757 / 10757 / 10757
142 142 delta size (min/max/avg) : 9672 / 108072 / 10690
143 143
144 144 deltas against prev : 3906 (84.58%)
145 145 where prev = p1 : 3906 (100.00%)
146 146 where prev = p2 : 0 ( 0.00%)
147 147 other : 0 ( 0.00%)
148 148 deltas against p1 : 649 (14.05%)
149 149 deltas against p2 : 63 ( 1.36%)
150 150 deltas against other : 0 ( 0.00%)
151 151
152 152
153 153 Test `debug-delta-find`
154 154 -----------------------
155 155
156 156 $ ls -1
157 157 SPARSE-REVLOG-TEST-FILE
158 158 $ hg debugdeltachain SPARSE-REVLOG-TEST-FILE | grep snap | tail -1
159 159 4971 4970 -1 3 5 4930 snap 19179 346472 427596 1.23414 15994877 15567281 36.40652 427596 179288 1.00000 5
160 160 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971
161 161 DBG-DELTAS-SEARCH: SEARCH rev=4971
162 162 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
163 163 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
164 164 DBG-DELTAS-SEARCH: type=snapshot-4
165 165 DBG-DELTAS-SEARCH: size=18296
166 166 DBG-DELTAS-SEARCH: base=4930
167 167 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
168 168 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
169 169 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
170 170 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
171 171 DBG-DELTAS-SEARCH: type=snapshot-4
172 172 DBG-DELTAS-SEARCH: size=19179
173 173 DBG-DELTAS-SEARCH: base=4930
174 174 DBG-DELTAS-SEARCH: TOO-HIGH
175 175 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
176 176 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
177 177 DBG-DELTAS-SEARCH: type=snapshot-3
178 178 DBG-DELTAS-SEARCH: size=39228
179 179 DBG-DELTAS-SEARCH: base=4799
180 180 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
181 181 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
182 182 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
183 183 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
184 184 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
185 185 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
186 186 DBG-DELTAS-SEARCH: type=snapshot-2
187 187 DBG-DELTAS-SEARCH: size=50213
188 188 DBG-DELTAS-SEARCH: base=4623
189 189 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
190 190 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
191 191 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
192 192 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
193 193
194 $ cat << EOF >>.hg/hgrc
195 > [storage]
196 > revlog.optimize-delta-parent-choice = no
197 > revlog.reuse-external-delta = yes
198 > EOF
199
200 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --quiet
201 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
202 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source full
203 DBG-DELTAS-SEARCH: SEARCH rev=4971
204 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
205 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
206 DBG-DELTAS-SEARCH: type=snapshot-4
207 DBG-DELTAS-SEARCH: size=18296
208 DBG-DELTAS-SEARCH: base=4930
209 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
210 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
211 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
212 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
213 DBG-DELTAS-SEARCH: type=snapshot-4
214 DBG-DELTAS-SEARCH: size=19179
215 DBG-DELTAS-SEARCH: base=4930
216 DBG-DELTAS-SEARCH: TOO-HIGH
217 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
218 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
219 DBG-DELTAS-SEARCH: type=snapshot-3
220 DBG-DELTAS-SEARCH: size=39228
221 DBG-DELTAS-SEARCH: base=4799
222 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
223 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
224 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
225 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
226 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
227 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
228 DBG-DELTAS-SEARCH: type=snapshot-2
229 DBG-DELTAS-SEARCH: size=50213
230 DBG-DELTAS-SEARCH: base=4623
231 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
232 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
233 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
234 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
235 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source storage
236 DBG-DELTAS-SEARCH: SEARCH rev=4971
237 DBG-DELTAS-SEARCH: ROUND #1 - 1 candidates - search-down
238 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
239 DBG-DELTAS-SEARCH: type=snapshot-3
240 DBG-DELTAS-SEARCH: size=39228
241 DBG-DELTAS-SEARCH: base=4799
242 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
243 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
244 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
245 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=1 try-count=1 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
246 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p1
247 DBG-DELTAS-SEARCH: SEARCH rev=4971
248 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
249 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
250 DBG-DELTAS-SEARCH: type=snapshot-4
251 DBG-DELTAS-SEARCH: size=18296
252 DBG-DELTAS-SEARCH: base=4930
253 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
254 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
255 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
256 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
257 DBG-DELTAS-SEARCH: type=snapshot-4
258 DBG-DELTAS-SEARCH: size=19179
259 DBG-DELTAS-SEARCH: base=4930
260 DBG-DELTAS-SEARCH: TOO-HIGH
261 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
262 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
263 DBG-DELTAS-SEARCH: type=snapshot-3
264 DBG-DELTAS-SEARCH: size=39228
265 DBG-DELTAS-SEARCH: base=4799
266 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
267 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
268 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
269 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
270 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
271 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
272 DBG-DELTAS-SEARCH: type=snapshot-2
273 DBG-DELTAS-SEARCH: size=50213
274 DBG-DELTAS-SEARCH: base=4623
275 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
276 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
277 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
278 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
279 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source p2
280 DBG-DELTAS-SEARCH: SEARCH rev=4971
281 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
282 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
283 DBG-DELTAS-SEARCH: type=snapshot-4
284 DBG-DELTAS-SEARCH: size=18296
285 DBG-DELTAS-SEARCH: base=4930
286 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
287 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
288 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
289 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
290 DBG-DELTAS-SEARCH: type=snapshot-4
291 DBG-DELTAS-SEARCH: size=19179
292 DBG-DELTAS-SEARCH: base=4930
293 DBG-DELTAS-SEARCH: TOO-HIGH
294 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
295 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
296 DBG-DELTAS-SEARCH: type=snapshot-3
297 DBG-DELTAS-SEARCH: size=39228
298 DBG-DELTAS-SEARCH: base=4799
299 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
300 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
301 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
302 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
303 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
304 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
305 DBG-DELTAS-SEARCH: type=snapshot-2
306 DBG-DELTAS-SEARCH: size=50213
307 DBG-DELTAS-SEARCH: base=4623
308 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
309 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
310 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
311 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
312 $ hg debug-delta-find SPARSE-REVLOG-TEST-FILE 4971 --source prev
313 DBG-DELTAS-SEARCH: SEARCH rev=4971
314 DBG-DELTAS-SEARCH: ROUND #1 - 2 candidates - search-down
315 DBG-DELTAS-SEARCH: CANDIDATE: rev=4962
316 DBG-DELTAS-SEARCH: type=snapshot-4
317 DBG-DELTAS-SEARCH: size=18296
318 DBG-DELTAS-SEARCH: base=4930
319 DBG-DELTAS-SEARCH: uncompressed-delta-size=30377
320 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
321 DBG-DELTAS-SEARCH: DELTA: length=16872 (BAD)
322 DBG-DELTAS-SEARCH: CANDIDATE: rev=4971
323 DBG-DELTAS-SEARCH: type=snapshot-4
324 DBG-DELTAS-SEARCH: size=19179
325 DBG-DELTAS-SEARCH: base=4930
326 DBG-DELTAS-SEARCH: TOO-HIGH
327 DBG-DELTAS-SEARCH: ROUND #2 - 1 candidates - search-down
328 DBG-DELTAS-SEARCH: CANDIDATE: rev=4930
329 DBG-DELTAS-SEARCH: type=snapshot-3
330 DBG-DELTAS-SEARCH: size=39228
331 DBG-DELTAS-SEARCH: base=4799
332 DBG-DELTAS-SEARCH: uncompressed-delta-size=33050
333 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
334 DBG-DELTAS-SEARCH: DELTA: length=19179 (GOOD)
335 DBG-DELTAS-SEARCH: ROUND #3 - 1 candidates - refine-down
336 DBG-DELTAS-SEARCH: CONTENDER: rev=4930 - length=19179
337 DBG-DELTAS-SEARCH: CANDIDATE: rev=4799
338 DBG-DELTAS-SEARCH: type=snapshot-2
339 DBG-DELTAS-SEARCH: size=50213
340 DBG-DELTAS-SEARCH: base=4623
341 DBG-DELTAS-SEARCH: uncompressed-delta-size=82661
342 DBG-DELTAS-SEARCH: delta-search-time=* (glob)
343 DBG-DELTAS-SEARCH: DELTA: length=49132 (BAD)
344 DBG-DELTAS: FILELOG:SPARSE-REVLOG-TEST-FILE: rev=4971: search-rounds=3 try-count=3 - delta-type=snapshot snap-depth=4 - p1-chain-length=15 p2-chain-length=-1 - duration=* (glob)
345
194 346 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now