##// END OF EJS Templates
find-delta: move most of the debug-find-delta code into the debug module...
marmoute -
r50571:4302db0f default
parent child Browse files
Show More
@@ -1,4752 +1,4718
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 getattr,
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 bundlerepo,
42 42 changegroup,
43 43 cmdutil,
44 44 color,
45 45 context,
46 46 copies,
47 47 dagparser,
48 48 dirstateutils,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 mdiff,
62 61 mergestate as mergestatemod,
63 62 metadata,
64 63 obsolete,
65 64 obsutil,
66 65 pathutil,
67 66 phases,
68 67 policy,
69 68 pvec,
70 69 pycompat,
71 70 registrar,
72 71 repair,
73 72 repoview,
74 73 requirements,
75 74 revlog,
76 revlogutils,
77 75 revset,
78 76 revsetlang,
79 77 scmutil,
80 78 setdiscovery,
81 79 simplemerge,
82 80 sshpeer,
83 81 sslutil,
84 82 streamclone,
85 83 strip,
86 84 tags as tagsmod,
87 85 templater,
88 86 treediscovery,
89 87 upgrade,
90 88 url as urlmod,
91 89 util,
92 90 vfs as vfsmod,
93 91 wireprotoframing,
94 92 wireprotoserver,
95 93 )
96 94 from .interfaces import repository
97 95 from .utils import (
98 96 cborutil,
99 97 compression,
100 98 dateutil,
101 99 procutil,
102 100 stringutil,
103 101 urlutil,
104 102 )
105 103
106 104 from .revlogutils import (
107 105 constants as revlog_constants,
108 106 debug as revlog_debug,
109 107 deltas as deltautil,
110 108 nodemap,
111 109 rewrite,
112 110 sidedata,
113 111 )
114 112
# Convenience alias for releasing a group of locks.
release = lockmod.release

# Command table for this module.  It is pre-populated with the commands
# registered by the strip module so those are exposed alongside the debug
# commands defined below.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
120 118
121 119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = store.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the current repo.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(anc), hex(anc)))
141 139
142 140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # This is a base85-armored version of the EICAR test file. See
    # https://en.wikipedia.org/wiki/EICAR_test_file for details.
    payload = util.b85decode(
        b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
        b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
    )
    testfile = 'eicar-test-file.com'
    with repo.cachevfs.open(testfile, b'wb') as fp:
        fp.write(payload)
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(testfile))
158 156
159 157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # openpath understands local paths and URLs alike; the resulting bundle
    # object is applied directly to the current repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
166 164
167 165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
       otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass: only used to size the progress bar and, with
    # --mergeable-file, the initial file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # second parse pass: actually create a commit for every 'n' event,
    # under the usual locks and a single transaction
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the last node committed (-1 before the first one)
        atbranch = b'default'
        nodeids = []  # node hash for each committed id, indexed by id
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge case: 3-way merge the file content of both
                        # parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag the line belonging to this rev so each rev's edit
                    # touches a distinct line
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the per-rev files from the second parent
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: provide content for the files above
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for the preceding node; written out at the end
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
351 349
352 350
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of the changegroup 'gen'.

    With 'all', every delta of the changelog, manifest, and each filelog is
    listed (id, parents, changeset, delta base and delta length); otherwise
    only the changelog node hashes are printed.  'indent' prefixes every
    output line (used when nested inside a bundle2 dump).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # consume and print every delta of the current sub-stream
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelogheader() returns {} when the stream is exhausted, which ends
        # this iter(callable, sentinel) loop
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
392 390
393 391
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for raw in sorted(markers):
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, obsutil.marker(None, raw))
        fm.end()
416 414
417 415
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from binary 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
426 424
427 425
def _quasirepr(thing):
    """Render 'thing' as bytes; mappings get a sorted, deterministic form."""
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if isinstance(thing, mapping_types):
        pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
434 432
435 433
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        if wanted and part.type not in wanted:
            continue
        ui.write(
            (
                b'%s -- %s (mandatory: %r)\n'
                % (part.type, _quasirepr(part.params), part.mandatory)
            )
        )
        if part.type == b'changegroup':
            # the unbundler is built unconditionally; only the detailed dump
            # is gated on --quiet
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458 456
459 457
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec, do not dump the contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482 480
483 481
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b' %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # always tear down the peer connection, even on error
        peer.close()
503 501
504 502
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the pre-computed information from the changelog sidedata
        files = None
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)

    if files is None:
        return
    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        # classify the file; the checks are ordered, a file only gets the
        # first matching label
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent, copy_source = b"p1", files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent, copy_source = b"p2", files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
554 552
555 553
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1node, p2node = repo.dirstate.parents()
    m1 = repo[p1node].manifest()
    m2 = repo[p2node].manifest()
    errcount = 0
    for err in repo.dirstate.verify(m1, m2):
        # err is a (format, arg, ...) tuple produced by dirstate.verify
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
569 567
570 568
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
583 581
584 582
def _debugdisplaycolor(ui):
    """list every color label known to the current configuration"""
    # work on a copy so the caller's ui styles are left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601 599
602 600
def _debugdisplaystyle(ui):
    """list each configured style label rendered with its own effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # pad so the effect lists line up in a column
            padding = b' ' * max(0, width - len(label))
            ui.write(b': ')
            ui.write(padding)
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
616 614
617 615
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    reqs, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
639 637
640 638
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone index file: emit its DAG, labelling requested revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) for each node, plus a label event
            # for every rev explicitly listed on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged rev to its list of tag names
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # yield branch-annotation ('a'), node ('n') and label ('l')
            # events in changelog order for dagtextlines() to format
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
710 708
711 709
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    picked_storage = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if picked_storage:
        # with -c/-m/--dir the positional "file" argument is really the rev
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 725
728 726
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747 745
748 746
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Return (p1, p2, compsize, uncompsize, deltatype, chain, chainsize)
        # for one revision of the revlog.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # classify the delta by comparing its base with the (effective)
        # parents and neighbours; order of the checks matters
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # chains are numbered in order of first appearance of their base
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate the sparse read and report how much data would
            # actually be fetched from disk for this chain
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
989 987
990 988
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with the full-text for the revision. This
    can be controlled with the --source flag.

    The revision uses the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # with one positional argument it is the revision (use -c/-m/--dir to pick
    # the storage); with two, the first one is an explicit file path
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # NOTE: the command name passed here is b'debugdeltachain' (historical);
    # it only affects error wording in openrevlog.
    # use `rl` for the local revlog object to avoid shadowing the module-level
    # `revlog` import
    rl = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    p1r, p2r = rl.parentrevs(rev)

    # translate --source into the revision the delta should be computed
    # against; nullrev means "start from the full text"
    if source == b'full':
        base_rev = nullrev
    elif source == b'storage':
        base_rev = rl.deltaparent(rev)
    elif source == b'p1':
        base_rev = p1r
    elif source == b'p2':
        base_rev = p2r
    elif source == b'prev':
        base_rev = rev - 1
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    # the heavy lifting (and the debug output) lives in the debug module
    revlog_debug.debug_delta_find(ui, rl, rev, base_rev=base_rev)
1080 1046
1081 1047
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 docket (the metadata file) instead of
    # the entries; only dirstate-v2 has a docket.
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        # unpack the fixed-layout tree metadata record from the docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --dates defaults to True; the deprecated --nodates flag, when given,
    # forces dates off regardless
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) so equal mtimes order predictably
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # an mtime of -1 is displayed as 'unset'
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit: render as 'lnk' rather than octal permissions
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    # finally, list any recorded copy sources
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1169 1135
1170 1136
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # the hash only exists in the dirstate-v2 docket; v1 prints nothing
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # the ignore-pattern hash occupies the tail of the tree metadata
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1185 1151
1186 1152
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # real remote: resolve the pull path and open a peer to it
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: use the local repository as "remote", restricted
        # to the requested revisions via a dedicated repoview filter
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same repoview trick for the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
                if remote_revs:
                    r = remote._repo.filtered(b'debug-discovery-remote-filter')
                    remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

                clnode = repo.changelog.node
                common = repo.revs(b'heads(::%ln)', common)
                common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: capture everything printed during the run
        # and report it as a single "output" field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # common and missing partition the repository
    assert len(common) + len(missing) == len(all)

    # the set discovery would have started from: everything not reachable
    # from (or reaching) the already-known common heads
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    # per-phase round-trip/query counters are only present when the audit
    # dict was populated by the discovery implementation
    if b'total-round-trips-heads' in data:
        fm.plain(
            b"  round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b"  round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b"  round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b"  queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b"  queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b"    common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b"  common: %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1468 1434
1469 1435
# chunk size (4 KiB) used when streaming data in `debugdownload`
_chunksize = 4 << 10
1471 1437
1472 1438
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    With --output, the payload is written to the given file; otherwise it is
    streamed to the ui.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # copy the resource in fixed-size chunks until EOF
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # close the destination (only when it is a real file, not the ui)
        # and the source handle even if the copy loop raised
        if output:
            dest.close()
        fh.close()
1495 1461
1496 1462
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    loaded = extensions.extensions(ui)
    hg_version = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions in stable, name-sorted order
    for name, module in sorted(loaded, key=operator.itemgetter(0)):
        internal = extensions.ismoduleinternal(module)

        # locate the extension source, when one can be determined
        srcpath = None
        if util.safehasattr(module, '__file__'):
            srcpath = pycompat.fsencode(module.__file__)
        elif getattr(sys, 'oxidized', False):
            srcpath = pycompat.sysexecutable

        if internal:
            tested = []  # never expose magic string to users
        else:
            tested = getattr(module, 'testedwith', b'').split()
        buglink = getattr(module, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', name)
        else:
            fm.write(b'name', b'%s', name)
            # annotate the name with the extension's testing status
            if internal or hg_version in tested:
                fm.plain(b'\n')
            elif not tested:
                fm.plain(_(b' (untested!)\n'))
            else:
                fm.plain(b' (%s!)\n' % tested[-1])

        fm.condwrite(
            ui.verbose and srcpath,
            b'source',
            _(b'  location: %s\n'),
            srcpath or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][internal])
            fm.data(bundled=internal)

        fm.condwrite(
            ui.verbose and tested,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(tested, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and buglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            buglink or b"",
        )

    fm.end()
1558 1524
1559 1525
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # successive transformation stages applied to the parsed tree; any of
    # them can be displayed with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, printing the tree after each requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print every candidate file the fileset matches
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1655 1621
1656 1622
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report is mutually exclusive with applying or simulating a repair
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the actual scanning/repair logic lives in the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1729 1695
1730 1696
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: longest variant name, at least the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # format string that pads the variant name to the column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output: bytes pass through, truthiness becomes yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so repo/config/default mismatches can be highlighted
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns only appear with --verbose
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1801 1767
1802 1768
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean capability probe as b'yes'/b'no'
        return b'yes' if flag else b'no'

    write = ui.writenoi18n
    write(b'path: %s\n' % path)
    write(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
    write(b'exec: %s\n' % yesno(util.checkexec(path)))
    write(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    write(b'symlink: %s\n' % yesno(util.checklink(path)))
    write(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probe case sensitivity with a throwaway file in the target path
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    write(b'case-sensitive: %s\n' % casesensitive)
1825 1791
1826 1792
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # turn the hex node ids from the command line into binary nodes
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing compression name to the on-disk bundle type
    name_to_type = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = name_to_type.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1873 1839
1874 1840
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no arguments: dump the combined matcher itself
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != b'.':
            # test the path itself first, then each containing directory
            for candidate in (nf,) + tuple(pathutil.finddirs(nf)):
                if ignore(candidate):
                    ignored = candidate
                    ignoredata = repo.dirstate._ignorefileandline(candidate)
                    break
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        # point the user at the exact rule that matched
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1923 1889
1924 1890
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # Attribute names must be native str: on Python 3, getattr() with a
    # bytes name raises TypeError (even when a default is supplied), so
    # the previous b'_revlog' spelling crashed instead of falling back
    # to the store itself.
    revlog = getattr(store, '_revlog', store)

    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=revlog,
        full_node=ui.debugflag,
    )
1946 1912
1947 1913
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)
        # always emit the first-parent edge; second parent only when set
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1966 1932
1967 1933
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # Attribute names must be native str: safehasattr() feeds the name to
    # getattr(), and on Python 3 a bytes name raises TypeError instead of
    # reporting the attribute as missing, so b'stats' made this command
    # crash rather than abort cleanly on the pure-Python index.
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1977 1943
1978 1944
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # running count of detected problems; returned as the exit status
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen builds expose sys.oxidized; the stdlib lives in the binary
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # probe for the optional Rust extensions without importing them eagerly
    try:
        from . import rustext # pytype: disable=import-error

        rustext.__doc__ # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # check that the compiled extensions selected by the module policy
    # actually import
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import ( # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import ( # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is reused below as the "templates are fine" flag
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the fallback default; only warn (not distinct error) for it
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own installation checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2277 2243
2278 2244
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # one character per queried node: b"1" for known, b"0" otherwise
    output = b"".join(b"1" if known else b"0" for known in flags)
    ui.write(b"%s\n" % output)
2292 2258
2293 2259
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only so legacy shell-completion scripts keep working; it is a
    # plain alias that delegates to debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2298 2264
2299 2265
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # force-free mode: just unlink the lock files and stop
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # set mode: grab the requested locks and hold them until interrupted
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # non-blocking acquire: fail immediately if already held
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True: # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # always drop any locks we managed to take
        release(*locks)

    # display mode: report the current state of both locks
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody else held it; release immediately
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # lock file vanished between the probe and the stat: free
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2422 2388
2423 2389
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            getcache().clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                # reading the manifest also populates the fulltext cache
                manifest.read()
        return

    # no option given: display the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
        return
    ui.write(
        _(
            b'cache contains %d manifest entries, in order of most to '
            b'least recent:\n'
        )
        % (len(cache),)
    )
    totalsize = 0
    for nodeid in cache:
        # peek() avoids reshuffling the LRU order while inspecting
        data = cache.peek(nodeid)
        size = len(data)
        totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
        ui.write(
            _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
        )
    ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
    ui.write(
        _(b'total cache data size %s, on-disk %s\n')
        % (util.bytecount(totalsize), util.bytecount(ondisk))
    )
2497 2463
2498 2464
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # compare the raw v1/v2 records to report which format is in use
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default template rendering the nested commits/files/extras data
        # written through the formatter below
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged (local/other), with optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records; the fields emitted depend on the record type
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # top-level extras for files without a merge record of their own
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2606 2572
2607 2573
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # 'branches' is handled separately below so that only open branch
    # names are offered, matching the historical behavior
    for namespace, ns in repo.names.items():
        if namespace != b'branches':
            names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)
    prefixes = args if args else [b'']
    completions = set()
    for prefix in prefixes:
        for candidate in names:
            if candidate.startswith(prefix):
                completions.add(candidate)
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2630 2596
2631 2597
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # every mode works on the unfiltered changelog
    cl = repo.unfiltered().changelog
    if opts['dump_new']:
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2693 2659
2694 2660
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # parse a raw hex node id without resolving it locally
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # deletion mode: remove markers at the given obsstore indices
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        # NOTE(review): grammar typo ("cannot used") kept
                        # as-is; changing it would alter command output
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # --index needs global indices, so iterate over every marker
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2844 2810
2845 2811
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # One line per copy, rendered as "source -> destination".
    for dest, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2858 2824
2859 2825
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # One line per copy, rendered as "source -> destination".
    for dest, source in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2872 2838
2873 2839
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Resolve the spec against the cwd and reject anything that falls
        # outside the repository root.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # dirstate paths always use '/'; translate on platforms where the
        # native separator differs.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        for f, st in dirstate.items():
            if not f.startswith(spec) or st.state not in acceptable:
                continue
            if fixpaths:
                f = f.replace(b'/', pycompat.ossep)
            if fullpaths:
                files.add(f)
                continue
            # Without --full, stop at the next path separator and offer the
            # directory prefix instead of the whole path.
            sep = f.find(pycompat.ossep, speclen)
            if sep >= 0:
                dirs.add(f[:sep])
            else:
                files.add(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the filter flags;
    # no flags means "everything" (normal, added, removed).
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2942 2908
2943 2909
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(ctx1, pats, opts)
    copymap = copies.pathcopies(ctx1, ctx2, matcher)
    # Deterministic output: sort by destination path.
    for dest, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2957 2923
2958 2924
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always turned on here; the extra output only
    # becomes visible when --debug is in effect.
    logging_override = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        pushable = peer.canpush()

        yes, no = _(b'yes'), _(b'no')
        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (yes if is_local else no))
        ui.write(_(b'pushable: %s\n') % (yes if pushable else no))
    finally:
        peer.close()
2982 2948
2983 2949
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    byteopts = pycompat.byteskwargs(opts)
    overrides = {}
    if byteopts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = byteopts[b'tool']
        ui.notenoi18n(
            b'with --tool %r\n' % (pycompat.bytestr(byteopts[b'tool']))
        )

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report the other configuration sources that can preempt
        # merge-patterns matching (see the docstring above).
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
        matcher = scmutil.match(ctx, pats, byteopts)
        changedelete = byteopts[b'changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            # Suppress the picker's chatter unless --debug was given.
            if ui.debugflag:
                quiet = util.nullcontextmanager()
            else:
                quiet = ui.silent(error=True)
            with quiet:
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3068 3034
3069 3035
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for key, value in sorted(peer.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
        else:
            # Update mode: conditionally set KEY from OLD to NEW.
            key, old, new = keyinfo
            with peer.commandexecutor() as executor:
                result = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            # Exit status 0 on success, 1 on failure.
            return not result
    finally:
        peer.close()
3105 3071
3106 3072
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors (pvec) of two revisions and print their
    # relation (=, >, <, |), depths, and distance metrics.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive default: previously `rel` was left unbound when none of
        # the comparisons above held, which would raise a NameError in the
        # final write below.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3133 3099
3134 3100
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this down.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            manifest_only = in_manifest - in_dirstate
            dirstate_only = in_dirstate - in_manifest
            not_added = {
                f for f in dirstate_only if not dirstate.get_entry(f).added
            }
            changedfiles = manifest_only | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3182 3148
3183 3149
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3200 3166
3201 3167
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a false value.
        origin = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not origin:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, origin[0], hex(origin[1]))
            )
3221 3187
3222 3188
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3228 3194
3229 3195
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    byteopts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, b'debugrevlog', file_, byteopts)

    # --dump emits raw index data; otherwise print the statistics report.
    if byteopts.get(b"dump"):
        revlog_debug.dump(ui, rlog)
    else:
        revlog_debug.debug_revlog(ui, rlog)
    return 0
3246 3212
3247 3213
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Two layouts are supported: 0 (legacy) and 1 (with flags/sizes).
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full node hashes with --debug, abbreviated ones otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Print the column header matching the chosen format/verbosity; the
    # node columns are padded to the hash width computed above.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One row per revision, mirroring the header layout above.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the lookup fails.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3361 3327
3362 3328
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The processing pipeline: each stage transforms the tree produced by
    # the previous one. Order matters.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final (optimize) stage.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Decide which stages to print: showalways stages are always shown,
    # showchanged stages only when the tree actually changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, recording each intermediate tree and printing the
    # requested stages as we go.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff the
        # resulting revision lists; any difference means the optimizer
        # changed semantics.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render the mismatch as a unified-diff-like listing.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3494 3460
3495 3461
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    byteopts = pycompat.byteskwargs(opts)

    if not byteopts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if byteopts[b'logiofd'] and byteopts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if byteopts[b'logiofd']:
        # Line buffering would be ideal, but binary-mode line buffering
        # isn't supported (and warns on Python 3.8+). Unbuffered output
        # could cost performance, but this path isn't performance critical.
        fd = int(byteopts[b'logiofd'])
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # A pipe can't seek, so opening it with 'ab' fails on py3;
            # fall back to plain write mode.
            logfh = os.fdopen(fd, 'wb', 0)
    elif byteopts[b'logiofile']:
        logfh = open(byteopts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3544 3510
3545 3511
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # A missing REV2 means the null revision, i.e. a single-parent state.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3573 3539
3574 3540
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the storage is implied, so the first positional
    # argument is actually the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3601 3567
3602 3568
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme not in defaultport:
        raise error.Abort(_(b"only https and ssh connections are supported"))
    try:
        addr = (url.host, int(url.port or defaultport[url.scheme]))
    except ValueError:
        raise error.Abort(_(b"malformed port number in URL"))

    from . import win32

    # No verification here: we only want the server's certificate so we can
    # inspect/repair its chain via the Windows certificate store.
    sock = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        sock.connect(addr)
        cert = sock.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        if win32.checkcertificatechain(cert, build=False):
            ui.status(_(b'full certificate chain is available\n'))
        else:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            # Second call with build enabled attempts to fetch the missing
            # intermediates/root.
            if win32.checkcertificatechain(cert):
                ui.status(_(b'done.\n'))
            else:
                ui.status(_(b'failed.\n'))
    finally:
        sock.close()
3674 3640
3675 3641
3676 3642 @command(
3677 3643 b"debugbackupbundle",
3678 3644 [
3679 3645 (
3680 3646 b"",
3681 3647 b"recover",
3682 3648 b"",
3683 3649 b"brings the specified changeset back into the repository",
3684 3650 )
3685 3651 ]
3686 3652 + cmdutil.logopts,
3687 3653 _(b"hg debugbackupbundle [--recover HASH]"),
3688 3654 )
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip-backup bundle file, newest first (mtime order), so
    # the most recent backups are listed -- and probed by --recover -- before
    # older ones.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from the bundle repo `other`,
        # honouring --newest-first and --no-merges.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover when the node is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the peer machinery while probing this bundle; a bundle
        # that cannot resolve the requested revisions is simply skipped.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # Stop after the first bundle containing the node.
                        break
            else:
                # Listing mode: print the bundle's timestamp header, then
                # either the bundle path (--verbose) or a one-line summary
                # per changeset via the templated displayer.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3816 3782
3817 3783
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the substate (path, source, revision) of every subrepo recorded
    # in the requested revision, sorted by path for stable output.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3829 3795
3830 3796
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose the ui and (possibly None) repo objects to the interactive
    # session under their conventional names.
    local_ns = {
        'ui': ui,
        'repo': repo,
    }

    code.interact(local=local_ns)
3846 3812
3847 3813
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                # leading separator, then the short nodes joined by spaces
                ui.write(b' ')
                ui.write(b' '.join(short(n) for n in succsset))
            ui.write(b'\n')
3902 3868
3903 3869
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')

    def describe(fnode):
        # Render one cached .hgtags filenode: None means the cache has no
        # entry, a falsy non-None value is a corrupt record, otherwise show
        # the hex node (flagging nodes absent from the .hgtags filelog).
        if fnode is None:
            return b'missing'
        if not fnode:
            return b'invalid'
        rendered = hex(fnode)
        if not flog.hasnode(fnode):
            rendered += b' (unknown node)'
        return rendered

    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        ui.write(b'%d %s %s\n' % (rev, hex(node), describe(fnode)))
3922 3888
3923 3889
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev needs a repository even though the command itself is
        # optionalrepo.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                # empty keys are malformed; b'ui' is rejected -- presumably
                # because it would shadow the ui template resource (confirm)
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the raw parse tree and, if alias expansion changed it,
        # the expanded tree as well.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3987 3953
3988 3954
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # A None response (no input available) is echoed as a placeholder so
    # the output line is always well-formed.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4003 3969
4004 3970
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever ui.prompt() produced.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4017 3983
4018 3984
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock while rebuilding
    # every cache. wlock is acquired before lock -- presumably the required
    # repo-wide lock ordering; confirm before reordering.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4024 3990
4025 3991
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before delegating the whole
    # operation to the upgrade module.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4075 4041
4076 4042
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Display paths with '/' separators when ui.slash is set on platforms
    # whose native separator differs.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Size the two path columns to the longest repo-absolute and relative
    # paths. Note: local is named `path`, not `abs`, to avoid shadowing the
    # builtin; generators avoid materializing throwaway lists.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            display(repo.pathto(path)),
            m.exact(path) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4103 4069
4104 4070
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Render the divergent nodes (if any) as "hex (phase)" pairs with a
        # trailing space; empty otherwise.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = b' '.join(
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            )
            dnodes = rendered + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4122 4088
4123 4089
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the wire protocol "debugwireargs" command against a peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only the test options remain.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        # Forward only the options that were actually set.
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4154 4120
4155 4121
4156 4122 def _parsewirelangblocks(fh):
4157 4123 activeaction = None
4158 4124 blocklines = []
4159 4125 lastindent = 0
4160 4126
4161 4127 for line in fh:
4162 4128 line = line.rstrip()
4163 4129 if not line:
4164 4130 continue
4165 4131
4166 4132 if line.startswith(b'#'):
4167 4133 continue
4168 4134
4169 4135 if not line.startswith(b' '):
4170 4136 # New block. Flush previous one.
4171 4137 if activeaction:
4172 4138 yield activeaction, blocklines
4173 4139
4174 4140 activeaction = line
4175 4141 blocklines = []
4176 4142 lastindent = 0
4177 4143 continue
4178 4144
4179 4145 # Else we start with an indent.
4180 4146
4181 4147 if not activeaction:
4182 4148 raise error.Abort(_(b'indented line outside of block'))
4183 4149
4184 4150 indent = len(line) - len(line.lstrip())
4185 4151
4186 4152 # If this line is indented more than the last line, concatenate it.
4187 4153 if indent > lastindent and blocklines:
4188 4154 blocklines[-1] += line.lstrip()
4189 4155 else:
4190 4156 blocklines.append(line)
4191 4157 lastindent = indent
4192 4158
4193 4159 # Flush last block.
4194 4160 if activeaction:
4195 4161 yield activeaction, blocklines
4196 4162
4197 4163
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``ssh1``. ``raw`` instances only allow sending raw data payloads and
    don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'ssh1',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw" and "ssh1"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = urlutil.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                    ui.status(
                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                    )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                ui.status(
                    _(b'response: %s\n')
                    % stringutil.pprint(res, bprefix=True, indent=2)
                )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # Fix: split() returns [b'BODYFILE', <path>]; open() needs
                    # the path element, not the whole list (previously this
                    # raised TypeError, so BODYFILE never worked).
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
@@ -1,621 +1,663
1 1 # revlogutils/debug.py - utility used for revlog debuging
2 2 #
3 3 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 4 # Copyright 2022 Octobus <contact@octobus.net>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import collections
10 10 import string
11 11
12 12 from .. import (
13 mdiff,
13 14 node as nodemod,
15 revlogutils,
14 16 util,
15 17 )
16 18
17 19 from . import (
18 20 constants,
21 deltas as deltautil,
19 22 )
20 23
# registry of all index columns, populated by the @debug_column decorator
INDEX_ENTRY_DEBUG_COLUMN = []

# sentinel meaning "this column is as wide as a rendered node id"
NODE_SIZE = object()
24 27
25 28
class _column_base:
    """contains the definition of a revlog column

    name: the column header,
    value_func: the function called to get a value,
    size: the width of the column,
    verbose_only: only include the column in verbose mode.
    """

    def __init__(self, name, value_func, size=None, verbose=False):
        self.name = name
        self.value_func = value_func
        # NODE_SIZE is a sentinel: the real width is only known once the
        # node display length is available, so it is resolved in get_size()
        if size is not NODE_SIZE:
            if size is None:
                size = 8  # arbitrary default
            # never narrower than the header text itself
            size = max(len(name), size)
        self._size = size
        self.verbose_only = verbose

    def get_size(self, node_size):
        # resolve the width, substituting the node display length for the
        # NODE_SIZE sentinel
        if self._size is NODE_SIZE:
            return node_size
        else:
            return self._size
51 54
def debug_column(name, size=None, verbose=False):
    """Register the decorated function as an index column.

    name: the name of the column,
    size: the expected size of the column,
    verbose: restrict the column to verbose mode only.
    """

    def register(func):
        column = _column_base(
            name=name,
            value_func=func,
            size=size,
            verbose=verbose,
        )
        INDEX_ENTRY_DEBUG_COLUMN.append(column)
        return column

    return register
70 73
71 74
@debug_column(b"rev", size=6)
def _rev(index, rev, entry, hexfn):
    """the revision number itself"""
    return b"%d" % rev
75 78
76 79
@debug_column(b"rank", size=6, verbose=True)
def rank(index, rev, entry, hexfn):
    """the rank field stored in the index entry"""
    return b"%d" % entry[constants.ENTRY_RANK]
80 83
81 84
@debug_column(b"linkrev", size=6)
def _linkrev(index, rev, entry, hexfn):
    """the link-rev field stored in the index entry"""
    linkrev = entry[constants.ENTRY_LINK_REV]
    return b"%d" % linkrev
85 88
86 89
@debug_column(b"nodeid", size=NODE_SIZE)
def _nodeid(index, rev, entry, hexfn):
    """the revision's node id, rendered with the selected hex function"""
    return hexfn(entry[constants.ENTRY_NODE_ID])
90 93
91 94
@debug_column(b"p1-rev", size=6, verbose=True)
def _p1_rev(index, rev, entry, hexfn):
    """revision number of the first parent"""
    return b"%d" % entry[constants.ENTRY_PARENT_1]
95 98
96 99
@debug_column(b"p1-nodeid", size=NODE_SIZE)
def _p1_node(index, rev, entry, hexfn):
    """node id of the first parent, looked up through the index"""
    p1_rev = entry[constants.ENTRY_PARENT_1]
    p1_entry = index[p1_rev]
    return hexfn(p1_entry[constants.ENTRY_NODE_ID])
102 105
103 106
@debug_column(b"p2-rev", size=6, verbose=True)
def _p2_rev(index, rev, entry, hexfn):
    """revision number of the second parent"""
    return b"%d" % entry[constants.ENTRY_PARENT_2]
107 110
108 111
@debug_column(b"p2-nodeid", size=NODE_SIZE)
def _p2_node(index, rev, entry, hexfn):
    """node id of the second parent, looked up through the index"""
    p2_rev = entry[constants.ENTRY_PARENT_2]
    p2_entry = index[p2_rev]
    return hexfn(p2_entry[constants.ENTRY_NODE_ID])
114 117
115 118
@debug_column(b"full-size", size=20, verbose=True)
def full_size(index, rev, entry, hexfn):
    """uncompressed length of the revision's full text"""
    return b"%d" % entry[constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
119 122
120 123
@debug_column(b"delta-base", size=6, verbose=True)
def delta_base(index, rev, entry, hexfn):
    """revision number this revision's delta is computed against"""
    return b"%d" % entry[constants.ENTRY_DELTA_BASE]
124 127
125 128
@debug_column(b"flags", size=2, verbose=True)
def flags(index, rev, entry, hexfn):
    """revision flags: the low 16 bits of the offset-and-flags field"""
    return b"%d" % (entry[constants.ENTRY_DATA_OFFSET] & 0xFFFF)
131 134
132 135
@debug_column(b"comp-mode", size=4, verbose=True)
def compression_mode(index, rev, entry, hexfn):
    """numeric compression mode of the revision data"""
    return b"%d" % entry[constants.ENTRY_DATA_COMPRESSION_MODE]
136 139
137 140
@debug_column(b"data-offset", size=20, verbose=True)
def data_offset(index, rev, entry, hexfn):
    """data offset: the high bits of the offset-and-flags field"""
    return b"%d" % (entry[constants.ENTRY_DATA_OFFSET] >> 16)
143 146
144 147
@debug_column(b"chunk-size", size=10, verbose=True)
def data_chunk_size(index, rev, entry, hexfn):
    """on-disk (compressed) length of the revision's data chunk"""
    return b"%d" % entry[constants.ENTRY_DATA_COMPRESSED_LENGTH]
148 151
149 152
@debug_column(b"sd-comp-mode", size=7, verbose=True)
def sidedata_compression_mode(index, rev, entry, hexfn):
    """symbolic name of the sidedata compression mode (raw value if unknown)"""
    comp = entry[constants.ENTRY_SIDEDATA_COMPRESSION_MODE]
    known_modes = {
        constants.COMP_MODE_PLAIN: b"plain",
        constants.COMP_MODE_DEFAULT: b"default",
        constants.COMP_MODE_INLINE: b"inline",
    }
    if comp in known_modes:
        return known_modes[comp]
    return b"%d" % comp
161 164
162 165
@debug_column(b"sidedata-offset", size=20, verbose=True)
def sidedata_offset(index, rev, entry, hexfn):
    """offset of the revision's sidedata"""
    return b"%d" % entry[constants.ENTRY_SIDEDATA_OFFSET]
166 169
167 170
@debug_column(b"sd-chunk-size", size=10, verbose=True)
def sidedata_chunk_size(index, rev, entry, hexfn):
    """on-disk (compressed) length of the revision's sidedata chunk"""
    return b"%d" % entry[constants.ENTRY_SIDEDATA_COMPRESSED_LENGTH]
171 174
172 175
def debug_index(
    ui,
    repo,
    formatter,
    revlog,
    full_node,
):
    """display index data for a revlog

    Prints one header line followed by one row per revision, using the
    columns registered in INDEX_ENTRY_DEBUG_COLUMN.  Columns flagged
    verbose_only are skipped unless the ui is in verbose mode.
    """
    # pick the node rendering: full hex or the short form
    if full_node:
        hexfn = nodemod.hex
    else:
        hexfn = nodemod.short

    # width of a rendered node id; sampling the first revision is enough
    # since every node renders at the same length
    idlen = 12
    for i in revlog:
        idlen = len(hexfn(revlog.node(i)))
        break

    fm = formatter

    # build the header: one right-justified label per visible column
    header_pieces = []
    for column in INDEX_ENTRY_DEBUG_COLUMN:
        if column.verbose_only and not ui.verbose:
            continue
        size = column.get_size(idlen)
        name = column.name
        header_pieces.append(name.rjust(size))

    fm.plain(b' '.join(header_pieces) + b'\n')

    index = revlog.index

    # one formatted row per revision
    for rev in revlog:
        fm.startitem()
        entry = index[rev]
        first = True
        for column in INDEX_ENTRY_DEBUG_COLUMN:
            if column.verbose_only and not ui.verbose:
                continue
            if not first:
                fm.plain(b' ')
            first = False

            # right-justify each value to the column's resolved width
            size = column.get_size(idlen)
            value = column.value_func(index, rev, entry, hexfn)
            display = b"%%%ds" % size
            fm.write(column.name, display, value)
        fm.plain(b'\n')

    fm.end()
223 226
224 227
def dump(ui, revlog):
    """perform the work for `hg debugrevlog --dump`"""
    # XXX seems redundant with debug index ?
    r = revlog
    numrevs = len(r)
    ui.write(
        (
            b"# rev p1rev p2rev start end deltastart base p1 p2"
            b" rawsize totalsize compression heads chainlen\n"
        )
    )
    # ts: running total of raw (uncompressed) sizes
    ts = 0
    # heads: revisions not (yet) seen as a parent of a later revision
    heads = set()

    for rev in range(numrevs):
        dbase = r.deltaparent(rev)
        if dbase == -1:
            # full snapshot: treat the revision as its own delta base
            dbase = rev
        cbase = r.chainbase(rev)
        clen = r.chainlen(rev)
        p1, p2 = r.parentrevs(rev)
        rs = r.rawsize(rev)
        ts = ts + rs
        # this revision's parents stop being heads; it becomes one
        heads -= set(r.parentrevs(rev))
        heads.add(rev)
        try:
            # cumulated raw size over on-disk size so far
            compression = ts / r.end(rev)
        except ZeroDivisionError:
            compression = 0
        ui.write(
            b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
            b"%11d %5d %8d\n"
            % (
                rev,
                p1,
                p2,
                r.start(rev),
                r.end(rev),
                r.start(dbase),
                r.start(cbase),
                r.start(p1),
                r.start(p2),
                rs,
                ts,
                compression,
                len(heads),
                clen,
            )
        )
274 277
275 278
def debug_revlog(ui, revlog):
    """code for `hg debugrevlog`

    Walk every revision of the revlog, gathering statistics about delta
    kinds (full snapshots, intermediate snapshots, deltas against prev /
    p1 / p2 / other), delta-chain lengths and spans, chunk compression
    types and sizes, then print a summary report.

    Fix: the "unrelated" line of the deltas-against-prev section used to
    repeat `numoprev` (the other-ancestor count); it now reports
    `numprev_nad`, the count of prev-deltas whose base is not an
    ancestor, which was computed but never displayed.
    """
    r = revlog
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & constants.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & constants.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### the total size of stored content if incompressed.
    full_text_total_size = 0
    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # number of snapshots with a non-ancestor delta
    numsnapdepth_nad = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against prev, where prev is a non-ancestor
    numprev_nad = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against other that is a non-ancestor
    numother_nad = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision: [min, max, total] triplets
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into a [min, max, total] accumulator in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in range(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            s = r.rawsize(rev)
            full_text_total_size += s
            addsize(s, datasize)
        if p2 != nodemod.nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nodemod.nullrev:
            # stored as a full snapshot (depth 0), or empty
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # nad: the delta base is neither a parent nor any ancestor
            nad = (
                delta != p1 and delta != p2 and not r.isancestorrev(delta, rev)
            )
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                if nad:
                    numsnapdepth_nad[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                    elif nad:
                        numprev_nad += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nodemod.nullrev:
                    numother += 1
                    numother_nad += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev - numprev_nad
    num_other_ancestors = numother - numother_nad
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # integer format sized to the widest expected value
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # "count (percent)" format sized to the widest expected value
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for pcfmtstr formats
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        base = b' lvl-%-3d : ' % depth
        count = fmt % pcfmt(numsnapdepth[depth], numrevs)
        pieces = [base, count]
        if numsnapdepth_nad[depth]:
            pieces[-1] = count = count[:-1]  # drop the final '\n'
            more = b' non-ancestor-bases: '
            anc_count = fmt
            anc_count %= pcfmt(numsnapdepth_nad[depth], numsnapdepth[depth])
            pieces.append(more)
            pieces.append(anc_count)
        ui.write(b''.join(pieces))
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    letters = string.ascii_letters.encode('ascii')

    def fmtchunktype(chunktype):
        # human-friendly label for a chunk's first byte
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in letters:
            return b' 0x%s (%s) : ' % (nodemod.hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % nodemod.hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    # render full_text_total_size with space-separated thousands groups
    b_total = b"%d" % full_text_total_size
    p_total = []
    while len(b_total) > 3:
        p_total.append(b_total[-3:])
        b_total = b_total[:-3]
    p_total.append(b_total)
    p_total.reverse()
    b_total = b' '.join(p_total)

    ui.write(b'\n')
    ui.writenoi18n(b'total-stored-content: %s bytes\n' % b_total)
    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other-ancestor : ' + fmt2 % pcfmt(numoprev, numprev)
            )
            # fixed: report the non-ancestor prev count instead of
            # repeating the other-ancestor count
            ui.writenoi18n(
                b' unrelated : ' + fmt2 % pcfmt(numprev_nad, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against ancs : '
                + fmt % pcfmt(num_other_ancestors, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : '
                + fmt % pcfmt(numother_nad, numdeltas)
            )
625
626
def debug_delta_find(ui, revlog, rev, base_rev=nodemod.nullrev):
    """display the search process for a delta

    Replays the delta search for `rev` with debug output routed to the
    ui.  If `base_rev` is not nullrev, a delta against that revision is
    precomputed and supplied as a cached delta, so the search starts
    from it instead of the full text.
    """
    # a delta computer wired to emit every search step on the ui
    # (suppressed in quiet mode)
    deltacomputer = deltautil.deltacomputer(
        revlog,
        write_debug=ui.write,
        debug_search=not ui.quiet,
    )

    node = revlog.node(rev)
    p1r, p2r = revlog.parentrevs(rev)
    p1 = revlog.node(p1r)
    p2 = revlog.node(p2r)
    full_text = revlog.revision(rev)
    btext = [full_text]
    textlen = len(btext[0])
    cachedelta = None
    flags = revlog.flags(rev)

    if base_rev != nodemod.nullrev:
        # simulate an incoming delta against `base_rev`
        base_text = revlog.revision(base_rev)
        delta = mdiff.textdiff(base_text, full_text)

        cachedelta = (base_rev, delta)
        # drop the prebuilt text so the search works from the cached delta
        btext = [None]

    revinfo = revlogutils.revisioninfo(
        node,
        p1,
        p2,
        btext,
        textlen,
        cachedelta,
        flags,
    )

    # run the search for its debug output; the resulting delta is discarded
    fh = revlog._datafp()
    deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
General Comments 0
You need to be logged in to leave comments. Login now