debugcommands: add a `--paranoid` option to `debug-repair-issue-6528`...
Raphaël Gomès
r48625:855463b5 stable
@@ -1,4915 +1,4932
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import binascii
11 11 import codecs
12 12 import collections
13 13 import contextlib
14 14 import difflib
15 15 import errno
16 16 import glob
17 17 import operator
18 18 import os
19 19 import platform
20 20 import random
21 21 import re
22 22 import socket
23 23 import ssl
24 24 import stat
25 25 import string
26 26 import subprocess
27 27 import sys
28 28 import time
29 29
30 30 from .i18n import _
31 31 from .node import (
32 32 bin,
33 33 hex,
34 34 nullrev,
35 35 short,
36 36 )
37 37 from .pycompat import (
38 38 getattr,
39 39 open,
40 40 )
41 41 from . import (
42 42 bundle2,
43 43 bundlerepo,
44 44 changegroup,
45 45 cmdutil,
46 46 color,
47 47 context,
48 48 copies,
49 49 dagparser,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 vfs as vfsmod,
92 92 wireprotoframing,
93 93 wireprotoserver,
94 94 wireprotov2peer,
95 95 )
96 96 from .interfaces import repository
97 97 from .utils import (
98 98 cborutil,
99 99 compression,
100 100 dateutil,
101 101 procutil,
102 102 stringutil,
103 103 urlutil,
104 104 )
105 105
106 106 from .revlogutils import (
107 107 deltas as deltautil,
108 108 nodemap,
109 109 rewrite,
110 110 sidedata,
111 111 )
112 112
113 113 release = lockmod.release
114 114
115 115 table = {}
116 116 table.update(strip.command._table)
117 117 command = registrar.command(table)
118 118
119 119
120 120 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
121 121 def debugancestor(ui, repo, *args):
122 122 """find the ancestor revision of two revisions in a given index"""
123 123 if len(args) == 3:
124 124 index, rev1, rev2 = args
125 125 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
126 126 lookup = r.lookup
127 127 elif len(args) == 2:
128 128 if not repo:
129 129 raise error.Abort(
130 130 _(b'there is no Mercurial repository here (.hg not found)')
131 131 )
132 132 rev1, rev2 = args
133 133 r = repo.changelog
134 134 lookup = repo.lookup
135 135 else:
136 136 raise error.Abort(_(b'either two or three arguments required'))
137 137 a = r.ancestor(lookup(rev1), lookup(rev2))
138 138 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
139 139
140 140
141 141 @command(b'debugantivirusrunning', [])
142 142 def debugantivirusrunning(ui, repo):
143 143 """attempt to trigger an antivirus scanner to see if one is active"""
144 144 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
145 145 f.write(
146 146 util.b85decode(
147 147 # This is a base85-armored version of the EICAR test file. See
148 148 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
149 149 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
150 150 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
151 151 )
152 152 )
153 153 # Give an AV engine time to scan the file.
154 154 time.sleep(2)
155 155 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
156 156
157 157
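The EICAR payload above is base85-armored so the raw test string never appears verbatim in the source tree, where it could itself trip a scanner. A minimal round-trip sketch with the same `util` helpers; the payload here is a harmless stand-in, not the real EICAR string:

```python
from mercurial import util

# Stand-in payload (NOT the real EICAR string); b85encode/b85decode are
# the armor/unarmor helpers, the latter used by the command above.
payload = b'harmless stand-in'
armored = util.b85encode(payload)
assert util.b85decode(armored) == payload
```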
158 158 @command(b'debugapplystreamclonebundle', [], b'FILE')
159 159 def debugapplystreamclonebundle(ui, repo, fname):
160 160 """apply a stream clone bundle file"""
161 161 f = hg.openpath(ui, fname)
162 162 gen = exchange.readbundle(ui, f, fname)
163 163 gen.apply(repo)
164 164
165 165
166 166 @command(
167 167 b'debugbuilddag',
168 168 [
169 169 (
170 170 b'm',
171 171 b'mergeable-file',
172 172 None,
173 173 _(b'add single file mergeable changes'),
174 174 ),
175 175 (
176 176 b'o',
177 177 b'overwritten-file',
178 178 None,
179 179 _(b'add single file all revs overwrite'),
180 180 ),
181 181 (b'n', b'new-file', None, _(b'add new file at each rev')),
182 182 ],
183 183 _(b'[OPTION]... [TEXT]'),
184 184 )
185 185 def debugbuilddag(
186 186 ui,
187 187 repo,
188 188 text=None,
189 189 mergeable_file=False,
190 190 overwritten_file=False,
191 191 new_file=False,
192 192 ):
193 193 """builds a repo with a given DAG from scratch in the current empty repo
194 194
195 195 The description of the DAG is read from stdin if not given on the
196 196 command line.
197 197
198 198 Elements:
199 199
200 200 - "+n" is a linear run of n nodes based on the current default parent
201 201 - "." is a single node based on the current default parent
202 202 - "$" resets the default parent to null (implied at the start);
203 203 otherwise the default parent is always the last node created
204 204 - "<p" sets the default parent to the backref p
205 205 - "*p" is a fork at parent p, which is a backref
206 206 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
207 207 - "/p2" is a merge of the preceding node and p2
208 208 - ":tag" defines a local tag for the preceding node
209 209 - "@branch" sets the named branch for subsequent nodes
210 210 - "#...\\n" is a comment up to the end of the line
211 211
212 212 Whitespace between the above elements is ignored.
213 213
214 214 A backref is either
215 215
216 216 - a number n, which references the node curr-n, where curr is the current
217 217 node, or
218 218 - the name of a local tag you placed earlier using ":tag", or
219 219 - empty to denote the default parent.
220 220
221 221 All string-valued elements are either strictly alphanumeric, or must
222 222 be enclosed in double quotes ("..."), with "\\" as escape character.
223 223 """
224 224
225 225 if text is None:
226 226 ui.status(_(b"reading DAG from stdin\n"))
227 227 text = ui.fin.read()
228 228
229 229 cl = repo.changelog
230 230 if len(cl) > 0:
231 231 raise error.Abort(_(b'repository is not empty'))
232 232
233 233 # determine number of revs in DAG
234 234 total = 0
235 235 for type, data in dagparser.parsedag(text):
236 236 if type == b'n':
237 237 total += 1
238 238
239 239 if mergeable_file:
240 240 linesperrev = 2
241 241 # make a file with k lines per rev
242 242 initialmergedlines = [
243 243 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
244 244 ]
245 245 initialmergedlines.append(b"")
246 246
247 247 tags = []
248 248 progress = ui.makeprogress(
249 249 _(b'building'), unit=_(b'revisions'), total=total
250 250 )
251 251 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
252 252 at = -1
253 253 atbranch = b'default'
254 254 nodeids = []
255 255 id = 0
256 256 progress.update(id)
257 257 for type, data in dagparser.parsedag(text):
258 258 if type == b'n':
259 259 ui.note((b'node %s\n' % pycompat.bytestr(data)))
260 260 id, ps = data
261 261
262 262 files = []
263 263 filecontent = {}
264 264
265 265 p2 = None
266 266 if mergeable_file:
267 267 fn = b"mf"
268 268 p1 = repo[ps[0]]
269 269 if len(ps) > 1:
270 270 p2 = repo[ps[1]]
271 271 pa = p1.ancestor(p2)
272 272 base, local, other = [
273 273 x[fn].data() for x in (pa, p1, p2)
274 274 ]
275 275 m3 = simplemerge.Merge3Text(base, local, other)
276 276 ml = [l.strip() for l in m3.merge_lines()]
277 277 ml.append(b"")
278 278 elif at > 0:
279 279 ml = p1[fn].data().split(b"\n")
280 280 else:
281 281 ml = initialmergedlines
282 282 ml[id * linesperrev] += b" r%i" % id
283 283 mergedtext = b"\n".join(ml)
284 284 files.append(fn)
285 285 filecontent[fn] = mergedtext
286 286
287 287 if overwritten_file:
288 288 fn = b"of"
289 289 files.append(fn)
290 290 filecontent[fn] = b"r%i\n" % id
291 291
292 292 if new_file:
293 293 fn = b"nf%i" % id
294 294 files.append(fn)
295 295 filecontent[fn] = b"r%i\n" % id
296 296 if len(ps) > 1:
297 297 if not p2:
298 298 p2 = repo[ps[1]]
299 299 for fn in p2:
300 300 if fn.startswith(b"nf"):
301 301 files.append(fn)
302 302 filecontent[fn] = p2[fn].data()
303 303
304 304 def fctxfn(repo, cx, path):
305 305 if path in filecontent:
306 306 return context.memfilectx(
307 307 repo, cx, path, filecontent[path]
308 308 )
309 309 return None
310 310
311 311 if len(ps) == 0 or ps[0] < 0:
312 312 pars = [None, None]
313 313 elif len(ps) == 1:
314 314 pars = [nodeids[ps[0]], None]
315 315 else:
316 316 pars = [nodeids[p] for p in ps]
317 317 cx = context.memctx(
318 318 repo,
319 319 pars,
320 320 b"r%i" % id,
321 321 files,
322 322 fctxfn,
323 323 date=(id, 0),
324 324 user=b"debugbuilddag",
325 325 extra={b'branch': atbranch},
326 326 )
327 327 nodeid = repo.commitctx(cx)
328 328 nodeids.append(nodeid)
329 329 at = id
330 330 elif type == b'l':
331 331 id, name = data
332 332 ui.note((b'tag %s\n' % name))
333 333 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
334 334 elif type == b'a':
335 335 ui.note((b'branch %s\n' % data))
336 336 atbranch = data
337 337 progress.update(id)
338 338
339 339 if tags:
340 340 repo.vfs.write(b"localtags", b"".join(tags))
341 341
342 342
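The DAG grammar accepted by `debugbuilddag` is easiest to see through its parser. A small sketch, assuming only what the command itself uses: `dagparser.parsedag` yields the same `b'n'` (node), `b'l'` (local tag) and `b'a'` (branch) events handled in the loop above:

```python
from mercurial import dagparser

# "+2" = two linear nodes; ":mytag" tags the last one; "*mytag" forks
# from that tag; "@stable" switches the named branch; "+1" adds a node.
text = b'+2 :mytag *mytag @stable +1'
for type, data in dagparser.parsedag(text):
    if type == b'n':
        id, ps = data
        print('node r%d, parents %r' % (id, ps))
    elif type == b'l':
        print('tag %r on r%d' % (data[1], data[0]))
    elif type == b'a':
        print('branch %r' % data)
```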
343 343 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
344 344 indent_string = b' ' * indent
345 345 if all:
346 346 ui.writenoi18n(
347 347 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
348 348 % indent_string
349 349 )
350 350
351 351 def showchunks(named):
352 352 ui.write(b"\n%s%s\n" % (indent_string, named))
353 353 for deltadata in gen.deltaiter():
354 354 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
355 355 ui.write(
356 356 b"%s%s %s %s %s %s %d\n"
357 357 % (
358 358 indent_string,
359 359 hex(node),
360 360 hex(p1),
361 361 hex(p2),
362 362 hex(cs),
363 363 hex(deltabase),
364 364 len(delta),
365 365 )
366 366 )
367 367
368 368 gen.changelogheader()
369 369 showchunks(b"changelog")
370 370 gen.manifestheader()
371 371 showchunks(b"manifest")
372 372 for chunkdata in iter(gen.filelogheader, {}):
373 373 fname = chunkdata[b'filename']
374 374 showchunks(fname)
375 375 else:
376 376 if isinstance(gen, bundle2.unbundle20):
377 377 raise error.Abort(_(b'use debugbundle2 for this file'))
378 378 gen.changelogheader()
379 379 for deltadata in gen.deltaiter():
380 380 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
381 381 ui.write(b"%s%s\n" % (indent_string, hex(node)))
382 382
383 383
384 384 def _debugobsmarkers(ui, part, indent=0, **opts):
385 385 """display version and markers contained in 'data'"""
386 386 opts = pycompat.byteskwargs(opts)
387 387 data = part.read()
388 388 indent_string = b' ' * indent
389 389 try:
390 390 version, markers = obsolete._readmarkers(data)
391 391 except error.UnknownVersion as exc:
392 392 msg = b"%sunsupported version: %s (%d bytes)\n"
393 393 msg %= indent_string, exc.version, len(data)
394 394 ui.write(msg)
395 395 else:
396 396 msg = b"%sversion: %d (%d bytes)\n"
397 397 msg %= indent_string, version, len(data)
398 398 ui.write(msg)
399 399 fm = ui.formatter(b'debugobsolete', opts)
400 400 for rawmarker in sorted(markers):
401 401 m = obsutil.marker(None, rawmarker)
402 402 fm.startitem()
403 403 fm.plain(indent_string)
404 404 cmdutil.showmarker(fm, m)
405 405 fm.end()
406 406
407 407
408 408 def _debugphaseheads(ui, data, indent=0):
409 409 """display version and markers contained in 'data'"""
410 410 indent_string = b' ' * indent
411 411 headsbyphase = phases.binarydecode(data)
412 412 for phase in phases.allphases:
413 413 for head in headsbyphase[phase]:
414 414 ui.write(indent_string)
415 415 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
416 416
417 417
418 418 def _quasirepr(thing):
419 419 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
420 420 return b'{%s}' % (
421 421 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
422 422 )
423 423 return pycompat.bytestr(repr(thing))
424 424
425 425
426 426 def _debugbundle2(ui, gen, all=None, **opts):
427 427 """lists the contents of a bundle2"""
428 428 if not isinstance(gen, bundle2.unbundle20):
429 429 raise error.Abort(_(b'not a bundle2 file'))
430 430 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
431 431 parttypes = opts.get('part_type', [])
432 432 for part in gen.iterparts():
433 433 if parttypes and part.type not in parttypes:
434 434 continue
435 435 msg = b'%s -- %s (mandatory: %r)\n'
436 436 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
437 437 if part.type == b'changegroup':
438 438 version = part.params.get(b'version', b'01')
439 439 cg = changegroup.getunbundler(version, part, b'UN')
440 440 if not ui.quiet:
441 441 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
442 442 if part.type == b'obsmarkers':
443 443 if not ui.quiet:
444 444 _debugobsmarkers(ui, part, indent=4, **opts)
445 445 if part.type == b'phase-heads':
446 446 if not ui.quiet:
447 447 _debugphaseheads(ui, part, indent=4)
448 448
449 449
450 450 @command(
451 451 b'debugbundle',
452 452 [
453 453 (b'a', b'all', None, _(b'show all details')),
454 454 (b'', b'part-type', [], _(b'show only the named part type')),
455 455 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
456 456 ],
457 457 _(b'FILE'),
458 458 norepo=True,
459 459 )
460 460 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
461 461 """lists the contents of a bundle"""
462 462 with hg.openpath(ui, bundlepath) as f:
463 463 if spec:
464 464 spec = exchange.getbundlespec(ui, f)
465 465 ui.write(b'%s\n' % spec)
466 466 return
467 467
468 468 gen = exchange.readbundle(ui, f, bundlepath)
469 469 if isinstance(gen, bundle2.unbundle20):
470 470 return _debugbundle2(ui, gen, all=all, **opts)
471 471 _debugchangegroup(ui, gen, all=all, **opts)
472 472
473 473
474 474 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
475 475 def debugcapabilities(ui, path, **opts):
476 476 """lists the capabilities of a remote peer"""
477 477 opts = pycompat.byteskwargs(opts)
478 478 peer = hg.peer(ui, opts, path)
479 479 try:
480 480 caps = peer.capabilities()
481 481 ui.writenoi18n(b'Main capabilities:\n')
482 482 for c in sorted(caps):
483 483 ui.write(b' %s\n' % c)
484 484 b2caps = bundle2.bundle2caps(peer)
485 485 if b2caps:
486 486 ui.writenoi18n(b'Bundle2 capabilities:\n')
487 487 for key, values in sorted(pycompat.iteritems(b2caps)):
488 488 ui.write(b' %s\n' % key)
489 489 for v in values:
490 490 ui.write(b' %s\n' % v)
491 491 finally:
492 492 peer.close()
493 493
494 494
495 495 @command(
496 496 b'debugchangedfiles',
497 497 [
498 498 (
499 499 b'',
500 500 b'compute',
501 501 False,
502 502 b"compute information instead of reading it from storage",
503 503 ),
504 504 ],
505 505 b'REV',
506 506 )
507 507 def debugchangedfiles(ui, repo, rev, **opts):
508 508 """list the stored files changes for a revision"""
509 509 ctx = scmutil.revsingle(repo, rev, None)
510 510 files = None
511 511
512 512 if opts['compute']:
513 513 files = metadata.compute_all_files_changes(ctx)
514 514 else:
515 515 sd = repo.changelog.sidedata(ctx.rev())
516 516 files_block = sd.get(sidedata.SD_FILES)
517 517 if files_block is not None:
518 518 files = metadata.decode_files_sidedata(sd)
519 519 if files is not None:
520 520 for f in sorted(files.touched):
521 521 if f in files.added:
522 522 action = b"added"
523 523 elif f in files.removed:
524 524 action = b"removed"
525 525 elif f in files.merged:
526 526 action = b"merged"
527 527 elif f in files.salvaged:
528 528 action = b"salvaged"
529 529 else:
530 530 action = b"touched"
531 531
532 532 copy_parent = b""
533 533 copy_source = b""
534 534 if f in files.copied_from_p1:
535 535 copy_parent = b"p1"
536 536 copy_source = files.copied_from_p1[f]
537 537 elif f in files.copied_from_p2:
538 538 copy_parent = b"p2"
539 539 copy_source = files.copied_from_p2[f]
540 540
541 541 data = (action, copy_parent, f, copy_source)
542 542 template = b"%-8s %2s: %s, %s;\n"
543 543 ui.write(template % data)
544 544
545 545
546 546 @command(b'debugcheckstate', [], b'')
547 547 def debugcheckstate(ui, repo):
548 548 """validate the correctness of the current dirstate"""
549 549 parent1, parent2 = repo.dirstate.parents()
550 550 m1 = repo[parent1].manifest()
551 551 m2 = repo[parent2].manifest()
552 552 errors = 0
553 553 for f in repo.dirstate:
554 554 state = repo.dirstate[f]
555 555 if state in b"nr" and f not in m1:
556 556 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
557 557 errors += 1
558 558 if state in b"a" and f in m1:
559 559 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
560 560 errors += 1
561 561 if state in b"m" and f not in m1 and f not in m2:
562 562 ui.warn(
563 563 _(b"%s in state %s, but not in either manifest\n") % (f, state)
564 564 )
565 565 errors += 1
566 566 for f in m1:
567 567 state = repo.dirstate[f]
568 568 if state not in b"nrm":
569 569 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
570 570 errors += 1
571 571 if errors:
572 572 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
573 573 raise error.Abort(errstr)
574 574
575 575
576 576 @command(
577 577 b'debugcolor',
578 578 [(b'', b'style', None, _(b'show all configured styles'))],
579 579 b'hg debugcolor',
580 580 )
581 581 def debugcolor(ui, repo, **opts):
582 582 """show available color, effects or style"""
583 583 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
584 584 if opts.get('style'):
585 585 return _debugdisplaystyle(ui)
586 586 else:
587 587 return _debugdisplaycolor(ui)
588 588
589 589
590 590 def _debugdisplaycolor(ui):
591 591 ui = ui.copy()
592 592 ui._styles.clear()
593 593 for effect in color._activeeffects(ui).keys():
594 594 ui._styles[effect] = effect
595 595 if ui._terminfoparams:
596 596 for k, v in ui.configitems(b'color'):
597 597 if k.startswith(b'color.'):
598 598 ui._styles[k] = k[6:]
599 599 elif k.startswith(b'terminfo.'):
600 600 ui._styles[k] = k[9:]
601 601 ui.write(_(b'available colors:\n'))
602 602 # sort label with a '_' after the other to group '_background' entry.
603 603 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
604 604 for colorname, label in items:
605 605 ui.write(b'%s\n' % colorname, label=label)
606 606
607 607
608 608 def _debugdisplaystyle(ui):
609 609 ui.write(_(b'available style:\n'))
610 610 if not ui._styles:
611 611 return
612 612 width = max(len(s) for s in ui._styles)
613 613 for label, effects in sorted(ui._styles.items()):
614 614 ui.write(b'%s' % label, label=label)
615 615 if effects:
616 616 # 50
617 617 ui.write(b': ')
618 618 ui.write(b' ' * (max(0, width - len(label))))
619 619 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
620 620 ui.write(b'\n')
621 621
622 622
623 623 @command(b'debugcreatestreamclonebundle', [], b'FILE')
624 624 def debugcreatestreamclonebundle(ui, repo, fname):
625 625 """create a stream clone bundle file
626 626
627 627 Stream bundles are special bundles that are essentially archives of
628 628 revlog files. They are commonly used for cloning very quickly.
629 629 """
630 630 # TODO we may want to turn this into an abort when this functionality
631 631 # is moved into `hg bundle`.
632 632 if phases.hassecret(repo):
633 633 ui.warn(
634 634 _(
635 635 b'(warning: stream clone bundle will contain secret '
636 636 b'revisions)\n'
637 637 )
638 638 )
639 639
640 640 requirements, gen = streamclone.generatebundlev1(repo)
641 641 changegroup.writechunks(ui, gen, fname)
642 642
643 643 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
644 644
645 645
646 646 @command(
647 647 b'debugdag',
648 648 [
649 649 (b't', b'tags', None, _(b'use tags as labels')),
650 650 (b'b', b'branches', None, _(b'annotate with branch names')),
651 651 (b'', b'dots', None, _(b'use dots for runs')),
652 652 (b's', b'spaces', None, _(b'separate elements by spaces')),
653 653 ],
654 654 _(b'[OPTION]... [FILE [REV]...]'),
655 655 optionalrepo=True,
656 656 )
657 657 def debugdag(ui, repo, file_=None, *revs, **opts):
658 658 """format the changelog or an index DAG as a concise textual description
659 659
660 660 If you pass a revlog index, the revlog's DAG is emitted. If you list
661 661 revision numbers, they get labeled in the output as rN.
662 662
663 663 Otherwise, the changelog DAG of the current repo is emitted.
664 664 """
665 665 spaces = opts.get('spaces')
666 666 dots = opts.get('dots')
667 667 if file_:
668 668 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
669 669 revs = {int(r) for r in revs}
670 670
671 671 def events():
672 672 for r in rlog:
673 673 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
674 674 if r in revs:
675 675 yield b'l', (r, b"r%i" % r)
676 676
677 677 elif repo:
678 678 cl = repo.changelog
679 679 tags = opts.get('tags')
680 680 branches = opts.get('branches')
681 681 if tags:
682 682 labels = {}
683 683 for l, n in repo.tags().items():
684 684 labels.setdefault(cl.rev(n), []).append(l)
685 685
686 686 def events():
687 687 b = b"default"
688 688 for r in cl:
689 689 if branches:
690 690 newb = cl.read(cl.node(r))[5][b'branch']
691 691 if newb != b:
692 692 yield b'a', newb
693 693 b = newb
694 694 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
695 695 if tags:
696 696 ls = labels.get(r)
697 697 if ls:
698 698 for l in ls:
699 699 yield b'l', (r, l)
700 700
701 701 else:
702 702 raise error.Abort(_(b'need repo for changelog dag'))
703 703
704 704 for line in dagparser.dagtextlines(
705 705 events(),
706 706 addspaces=spaces,
707 707 wraplabels=True,
708 708 wrapannotations=True,
709 709 wrapnonlinear=dots,
710 710 usedots=dots,
711 711 maxlinewidth=70,
712 712 ):
713 713 ui.write(line)
714 714 ui.write(b"\n")
715 715
716 716
717 717 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
718 718 def debugdata(ui, repo, file_, rev=None, **opts):
719 719 """dump the contents of a data file revision"""
720 720 opts = pycompat.byteskwargs(opts)
721 721 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
722 722 if rev is not None:
723 723 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
724 724 file_, rev = None, file_
725 725 elif rev is None:
726 726 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
727 727 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
728 728 try:
729 729 ui.write(r.rawdata(r.lookup(rev)))
730 730 except KeyError:
731 731 raise error.Abort(_(b'invalid revision identifier %s') % rev)
732 732
733 733
734 734 @command(
735 735 b'debugdate',
736 736 [(b'e', b'extended', None, _(b'try extended date formats'))],
737 737 _(b'[-e] DATE [RANGE]'),
738 738 norepo=True,
739 739 optionalrepo=True,
740 740 )
741 741 def debugdate(ui, date, range=None, **opts):
742 742 """parse and display a date"""
743 743 if opts["extended"]:
744 744 d = dateutil.parsedate(date, dateutil.extendeddateformats)
745 745 else:
746 746 d = dateutil.parsedate(date)
747 747 ui.writenoi18n(b"internal: %d %d\n" % d)
748 748 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
749 749 if range:
750 750 m = dateutil.matchdate(range)
751 751 ui.writenoi18n(b"match: %s\n" % m(d[0]))
752 752
753 753
754 754 @command(
755 755 b'debugdeltachain',
756 756 cmdutil.debugrevlogopts + cmdutil.formatteropts,
757 757 _(b'-c|-m|FILE'),
758 758 optionalrepo=True,
759 759 )
760 760 def debugdeltachain(ui, repo, file_=None, **opts):
761 761 """dump information about delta chains in a revlog
762 762
763 763 Output can be templatized. Available template keywords are:
764 764
765 765 :``rev``: revision number
766 766 :``chainid``: delta chain identifier (numbered by unique base)
767 767 :``chainlen``: delta chain length to this revision
768 768 :``prevrev``: previous revision in delta chain
769 769 :``deltatype``: role of delta / how it was computed
770 770 :``compsize``: compressed size of revision
771 771 :``uncompsize``: uncompressed size of revision
772 772 :``chainsize``: total size of compressed revisions in chain
773 773 :``chainratio``: total chain size divided by uncompressed revision size
774 774 (new delta chains typically start at ratio 2.00)
775 775 :``lindist``: linear distance from base revision in delta chain to end
776 776 of this revision
777 777 :``extradist``: total size of revisions not part of this delta chain from
778 778 base of delta chain to end of this revision; a measurement
779 779 of how much extra data we need to read/seek across to read
780 780 the delta chain for this revision
781 781 :``extraratio``: extradist divided by chainsize; another representation of
782 782 how much unrelated data is needed to load this delta chain
783 783
784 784 If the repository is configured to use the sparse read, additional keywords
785 785 are available:
786 786
787 787 :``readsize``: total size of data read from the disk for a revision
788 788 (sum of the sizes of all the blocks)
789 789 :``largestblock``: size of the largest block of data read from the disk
790 790 :``readdensity``: density of useful bytes in the data read from the disk
791 791 :``srchunks``: in how many data hunks the whole revision would be read
792 792
793 793 The sparse read can be enabled with experimental.sparse-read = True
794 794 """
795 795 opts = pycompat.byteskwargs(opts)
796 796 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
797 797 index = r.index
798 798 start = r.start
799 799 length = r.length
800 800 generaldelta = r._generaldelta
801 801 withsparseread = getattr(r, '_withsparseread', False)
802 802
803 803 def revinfo(rev):
804 804 e = index[rev]
805 805 compsize = e[1]
806 806 uncompsize = e[2]
807 807 chainsize = 0
808 808
809 809 if generaldelta:
810 810 if e[3] == e[5]:
811 811 deltatype = b'p1'
812 812 elif e[3] == e[6]:
813 813 deltatype = b'p2'
814 814 elif e[3] == rev - 1:
815 815 deltatype = b'prev'
816 816 elif e[3] == rev:
817 817 deltatype = b'base'
818 818 else:
819 819 deltatype = b'other'
820 820 else:
821 821 if e[3] == rev:
822 822 deltatype = b'base'
823 823 else:
824 824 deltatype = b'prev'
825 825
826 826 chain = r._deltachain(rev)[0]
827 827 for iterrev in chain:
828 828 e = index[iterrev]
829 829 chainsize += e[1]
830 830
831 831 return compsize, uncompsize, deltatype, chain, chainsize
832 832
833 833 fm = ui.formatter(b'debugdeltachain', opts)
834 834
835 835 fm.plain(
836 836 b' rev chain# chainlen prev delta '
837 837 b'size rawsize chainsize ratio lindist extradist '
838 838 b'extraratio'
839 839 )
840 840 if withsparseread:
841 841 fm.plain(b' readsize largestblk rddensity srchunks')
842 842 fm.plain(b'\n')
843 843
844 844 chainbases = {}
845 845 for rev in r:
846 846 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
847 847 chainbase = chain[0]
848 848 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
849 849 basestart = start(chainbase)
850 850 revstart = start(rev)
851 851 lineardist = revstart + comp - basestart
852 852 extradist = lineardist - chainsize
853 853 try:
854 854 prevrev = chain[-2]
855 855 except IndexError:
856 856 prevrev = -1
857 857
858 858 if uncomp != 0:
859 859 chainratio = float(chainsize) / float(uncomp)
860 860 else:
861 861 chainratio = chainsize
862 862
863 863 if chainsize != 0:
864 864 extraratio = float(extradist) / float(chainsize)
865 865 else:
866 866 extraratio = extradist
867 867
868 868 fm.startitem()
869 869 fm.write(
870 870 b'rev chainid chainlen prevrev deltatype compsize '
871 871 b'uncompsize chainsize chainratio lindist extradist '
872 872 b'extraratio',
873 873 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
874 874 rev,
875 875 chainid,
876 876 len(chain),
877 877 prevrev,
878 878 deltatype,
879 879 comp,
880 880 uncomp,
881 881 chainsize,
882 882 chainratio,
883 883 lineardist,
884 884 extradist,
885 885 extraratio,
886 886 rev=rev,
887 887 chainid=chainid,
888 888 chainlen=len(chain),
889 889 prevrev=prevrev,
890 890 deltatype=deltatype,
891 891 compsize=comp,
892 892 uncompsize=uncomp,
893 893 chainsize=chainsize,
894 894 chainratio=chainratio,
895 895 lindist=lineardist,
896 896 extradist=extradist,
897 897 extraratio=extraratio,
898 898 )
899 899 if withsparseread:
900 900 readsize = 0
901 901 largestblock = 0
902 902 srchunks = 0
903 903
904 904 for revschunk in deltautil.slicechunk(r, chain):
905 905 srchunks += 1
906 906 blkend = start(revschunk[-1]) + length(revschunk[-1])
907 907 blksize = blkend - start(revschunk[0])
908 908
909 909 readsize += blksize
910 910 if largestblock < blksize:
911 911 largestblock = blksize
912 912
913 913 if readsize:
914 914 readdensity = float(chainsize) / float(readsize)
915 915 else:
916 916 readdensity = 1
917 917
918 918 fm.write(
919 919 b'readsize largestblock readdensity srchunks',
920 920 b' %10d %10d %9.5f %8d',
921 921 readsize,
922 922 largestblock,
923 923 readdensity,
924 924 srchunks,
925 925 readsize=readsize,
926 926 largestblock=largestblock,
927 927 readdensity=readdensity,
928 928 srchunks=srchunks,
929 929 )
930 930
931 931 fm.plain(b'\n')
932 932
933 933 fm.end()
934 934
935 935
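To make the delta-chain keywords concrete, a worked computation with invented numbers, following the same definitions as `revinfo` and the loop above (`extradist = lineardist - chainsize`; the real code guards the ratios against zero division):

```python
# Hypothetical revision: 1.2 KB compressed delta, 40 KB uncompressed text,
# chain compressed to 18 KB in total, chain spanning 25 KB of revlog data.
comp, uncomp, chainsize, lineardist = 1200, 40000, 18000, 25000

chainratio = chainsize / uncomp     # 0.45: chain size vs. full text size
extradist = lineardist - chainsize  # 7000 bytes of unrelated data to skip
extraratio = extradist / chainsize  # ~0.389: seek overhead per chain byte
print(chainratio, extradist, extraratio)
```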
936 936 @command(
937 937 b'debugdirstate|debugstate',
938 938 [
939 939 (
940 940 b'',
941 941 b'nodates',
942 942 None,
943 943 _(b'do not display the saved mtime (DEPRECATED)'),
944 944 ),
945 945 (b'', b'dates', True, _(b'display the saved mtime')),
946 946 (b'', b'datesort', None, _(b'sort by saved mtime')),
947 947 (
948 948 b'',
949 949 b'all',
950 950 False,
951 951 _(b'display dirstate-v2 tree nodes that would not exist in v1'),
952 952 ),
953 953 ],
954 954 _(b'[OPTION]...'),
955 955 )
956 956 def debugstate(ui, repo, **opts):
957 957 """show the contents of the current dirstate"""
958 958
959 959 nodates = not opts['dates']
960 960 if opts.get('nodates') is not None:
961 961 nodates = True
962 962 datesort = opts.get('datesort')
963 963
964 964 if datesort:
965 965 keyfunc = lambda x: (
966 966 x[1].v1_mtime(),
967 967 x[0],
968 968 ) # sort by mtime, then by filename
969 969 else:
970 970 keyfunc = None # sort by filename
971 971 if opts['all']:
972 972 entries = list(repo.dirstate._map.debug_iter())
973 973 else:
974 974 entries = list(pycompat.iteritems(repo.dirstate))
975 975 entries.sort(key=keyfunc)
976 976 for file_, ent in entries:
977 977 if ent.v1_mtime() == -1:
978 978 timestr = b'unset '
979 979 elif nodates:
980 980 timestr = b'set '
981 981 else:
982 982 timestr = time.strftime(
983 983 "%Y-%m-%d %H:%M:%S ", time.localtime(ent.v1_mtime())
984 984 )
985 985 timestr = encoding.strtolocal(timestr)
986 986 if ent.mode & 0o20000:
987 987 mode = b'lnk'
988 988 else:
989 989 mode = b'%3o' % (ent.v1_mode() & 0o777 & ~util.umask)
990 990 ui.write(
991 991 b"%c %s %10d %s%s\n"
992 992 % (ent.v1_state(), mode, ent.v1_size(), timestr, file_)
993 993 )
994 994 for f in repo.dirstate.copies():
995 995 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
996 996
997 997
998 998 @command(
999 999 b'debugdirstateignorepatternshash',
1000 1000 [],
1001 1001 _(b''),
1002 1002 )
1003 1003 def debugdirstateignorepatternshash(ui, repo, **opts):
1004 1004 """show the hash of ignore patterns stored in dirstate if v2,
1005 1005 or nothing for dirstate-v1
1006 1006 """
1007 1007 if repo.dirstate._use_dirstate_v2:
1008 1008 docket = repo.dirstate._map.docket
1009 1009 hash_len = 20 # 160 bits for SHA-1
1010 1010 hash_bytes = docket.tree_metadata[-hash_len:]
1011 1011 ui.write(binascii.hexlify(hash_bytes) + b'\n')
1012 1012
1013 1013
1014 1014 @command(
1015 1015 b'debugdiscovery',
1016 1016 [
1017 1017 (b'', b'old', None, _(b'use old-style discovery')),
1018 1018 (
1019 1019 b'',
1020 1020 b'nonheads',
1021 1021 None,
1022 1022 _(b'use old-style discovery with non-heads included'),
1023 1023 ),
1024 1024 (b'', b'rev', [], b'restrict discovery to this set of revs'),
1025 1025 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
1026 1026 (
1027 1027 b'',
1028 1028 b'local-as-revs',
1029 1029 b"",
1030 1030 b'treat local as having these revisions only',
1031 1031 ),
1032 1032 (
1033 1033 b'',
1034 1034 b'remote-as-revs',
1035 1035 b"",
1036 1036 b'use local as remote, with only these revisions',
1037 1037 ),
1038 1038 ]
1039 1039 + cmdutil.remoteopts
1040 1040 + cmdutil.formatteropts,
1041 1041 _(b'[--rev REV] [OTHER]'),
1042 1042 )
1043 1043 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1044 1044 """runs the changeset discovery protocol in isolation
1045 1045
1046 1046 The local peer can be "replaced" by a subset of the local repository by
1047 1047 using the `--local-as-revs` flag. In the same way, the usual `remote` peer can
1048 1048 be "replaced" by a subset of the local repository using the
1049 1049 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1050 1050 discovery situations.
1051 1051
1052 1052 The following developer-oriented configs are relevant for people playing with this command:
1053 1053
1054 1054 * devel.discovery.exchange-heads=True
1055 1055
1056 1056 If False, the discovery will not start with
1057 1057 remote head fetching and local head querying.
1058 1058
1059 1059 * devel.discovery.grow-sample=True
1060 1060
1061 1061 If False, the sample size used in set discovery will not be increased
1062 1062 through the process.
1063 1063
1064 1064 * devel.discovery.grow-sample.dynamic=True
1065 1065
1066 1066 When discovery.grow-sample.dynamic is True (the default), the sample size is
1067 1067 adapted to the shape of the undecided set (it is set to the max of:
1068 1068 <target-size>, len(roots(undecided)), len(heads(undecided))).
1069 1069
1070 1070 * devel.discovery.grow-sample.rate=1.05
1071 1071
1072 1072 the rate at which the sample grows
1073 1073
1074 1074 * devel.discovery.randomize=True
1075 1075
1076 1076 If False, random samplings during discovery are deterministic. It is meant for
1077 1077 integration tests.
1078 1078
1079 1079 * devel.discovery.sample-size=200
1080 1080
1081 1081 Control the initial size of the discovery sample
1082 1082
1083 1083 * devel.discovery.sample-size.initial=100
1084 1084
1085 1085 Control the size of the initial discovery sample
1086 1086 """
1087 1087 opts = pycompat.byteskwargs(opts)
1088 1088 unfi = repo.unfiltered()
1089 1089
1090 1090 # setup potential extra filtering
1091 1091 local_revs = opts[b"local_as_revs"]
1092 1092 remote_revs = opts[b"remote_as_revs"]
1093 1093
1094 1094 # make sure tests are repeatable
1095 1095 random.seed(int(opts[b'seed']))
1096 1096
1097 1097 if not remote_revs:
1098 1098
1099 1099 remoteurl, branches = urlutil.get_unique_pull_path(
1100 1100 b'debugdiscovery', repo, ui, remoteurl
1101 1101 )
1102 1102 remote = hg.peer(repo, opts, remoteurl)
1103 1103 ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
1104 1104 else:
1105 1105 branches = (None, [])
1106 1106 remote_filtered_revs = scmutil.revrange(
1107 1107 unfi, [b"not (::(%s))" % remote_revs]
1108 1108 )
1109 1109 remote_filtered_revs = frozenset(remote_filtered_revs)
1110 1110
1111 1111 def remote_func(x):
1112 1112 return remote_filtered_revs
1113 1113
1114 1114 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1115 1115
1116 1116 remote = repo.peer()
1117 1117 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1118 1118
1119 1119 if local_revs:
1120 1120 local_filtered_revs = scmutil.revrange(
1121 1121 unfi, [b"not (::(%s))" % local_revs]
1122 1122 )
1123 1123 local_filtered_revs = frozenset(local_filtered_revs)
1124 1124
1125 1125 def local_func(x):
1126 1126 return local_filtered_revs
1127 1127
1128 1128 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1129 1129 repo = repo.filtered(b'debug-discovery-local-filter')
1130 1130
1131 1131 data = {}
1132 1132 if opts.get(b'old'):
1133 1133
1134 1134 def doit(pushedrevs, remoteheads, remote=remote):
1135 1135 if not util.safehasattr(remote, b'branches'):
1136 1136 # enable in-client legacy support
1137 1137 remote = localrepo.locallegacypeer(remote.local())
1138 1138 common, _in, hds = treediscovery.findcommonincoming(
1139 1139 repo, remote, force=True, audit=data
1140 1140 )
1141 1141 common = set(common)
1142 1142 if not opts.get(b'nonheads'):
1143 1143 ui.writenoi18n(
1144 1144 b"unpruned common: %s\n"
1145 1145 % b" ".join(sorted(short(n) for n in common))
1146 1146 )
1147 1147
1148 1148 clnode = repo.changelog.node
1149 1149 common = repo.revs(b'heads(::%ln)', common)
1150 1150 common = {clnode(r) for r in common}
1151 1151 return common, hds
1152 1152
1153 1153 else:
1154 1154
1155 1155 def doit(pushedrevs, remoteheads, remote=remote):
1156 1156 nodes = None
1157 1157 if pushedrevs:
1158 1158 revs = scmutil.revrange(repo, pushedrevs)
1159 1159 nodes = [repo[r].node() for r in revs]
1160 1160 common, any, hds = setdiscovery.findcommonheads(
1161 1161 ui, repo, remote, ancestorsof=nodes, audit=data
1162 1162 )
1163 1163 return common, hds
1164 1164
1165 1165 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1166 1166 localrevs = opts[b'rev']
1167 1167
1168 1168 fm = ui.formatter(b'debugdiscovery', opts)
1169 1169 if fm.strict_format:
1170 1170
1171 1171 @contextlib.contextmanager
1172 1172 def may_capture_output():
1173 1173 ui.pushbuffer()
1174 1174 yield
1175 1175 data[b'output'] = ui.popbuffer()
1176 1176
1177 1177 else:
1178 1178 may_capture_output = util.nullcontextmanager
1179 1179 with may_capture_output():
1180 1180 with util.timedcm('debug-discovery') as t:
1181 1181 common, hds = doit(localrevs, remoterevs)
1182 1182
1183 1183 # compute all statistics
1184 1184 heads_common = set(common)
1185 1185 heads_remote = set(hds)
1186 1186 heads_local = set(repo.heads())
1187 1187 # note: they cannot be a local or remote head that is in common and not
1188 1188 # itself a head of common.
1189 1189 heads_common_local = heads_common & heads_local
1190 1190 heads_common_remote = heads_common & heads_remote
1191 1191 heads_common_both = heads_common & heads_remote & heads_local
1192 1192
1193 1193 all = repo.revs(b'all()')
1194 1194 common = repo.revs(b'::%ln', common)
1195 1195 roots_common = repo.revs(b'roots(::%ld)', common)
1196 1196 missing = repo.revs(b'not ::%ld', common)
1197 1197 heads_missing = repo.revs(b'heads(%ld)', missing)
1198 1198 roots_missing = repo.revs(b'roots(%ld)', missing)
1199 1199 assert len(common) + len(missing) == len(all)
1200 1200
1201 1201 initial_undecided = repo.revs(
1202 1202 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1203 1203 )
1204 1204 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1205 1205 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1206 1206 common_initial_undecided = initial_undecided & common
1207 1207 missing_initial_undecided = initial_undecided & missing
1208 1208
1209 1209 data[b'elapsed'] = t.elapsed
1210 1210 data[b'nb-common-heads'] = len(heads_common)
1211 1211 data[b'nb-common-heads-local'] = len(heads_common_local)
1212 1212 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1213 1213 data[b'nb-common-heads-both'] = len(heads_common_both)
1214 1214 data[b'nb-common-roots'] = len(roots_common)
1215 1215 data[b'nb-head-local'] = len(heads_local)
1216 1216 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1217 1217 data[b'nb-head-remote'] = len(heads_remote)
1218 1218 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1219 1219 heads_common_remote
1220 1220 )
1221 1221 data[b'nb-revs'] = len(all)
1222 1222 data[b'nb-revs-common'] = len(common)
1223 1223 data[b'nb-revs-missing'] = len(missing)
1224 1224 data[b'nb-missing-heads'] = len(heads_missing)
1225 1225 data[b'nb-missing-roots'] = len(roots_missing)
1226 1226 data[b'nb-ini_und'] = len(initial_undecided)
1227 1227 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1228 1228 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1229 1229 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1230 1230 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1231 1231
1232 1232 fm.startitem()
1233 1233 fm.data(**pycompat.strkwargs(data))
1234 1234 # display discovery summary
1235 1235 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1236 1236 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1237 1237 fm.plain(b"heads summary:\n")
1238 1238 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1239 1239 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1240 1240 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1241 1241 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1242 1242 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1243 1243 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1244 1244 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1245 1245 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1246 1246 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1247 1247 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1248 1248 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1249 1249 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1250 1250 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1251 1251 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1252 1252 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1253 1253 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1254 1254 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1255 1255 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1256 1256 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1257 1257 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1258 1258 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1259 1259 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1260 1260
1261 1261 if ui.verbose:
1262 1262 fm.plain(
1263 1263 b"common heads: %s\n"
1264 1264 % b" ".join(sorted(short(n) for n in heads_common))
1265 1265 )
1266 1266 fm.end()
1267 1267
1268 1268
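A simplified sketch, not the real `setdiscovery` code, of how the sample-size knobs documented in the docstring interact: `grow-sample.rate` scales the sample each round, and `grow-sample.dynamic` bounds it below by the shape of the undecided set:

```python
def next_sample_size(current, undecided_roots, undecided_heads,
                     rate=1.05, dynamic=True):
    # devel.discovery.grow-sample.rate grows the target size each round
    target = int(current * rate)
    if dynamic:
        # devel.discovery.grow-sample.dynamic: take the max of the target
        # and len(roots(undecided)), len(heads(undecided)), per the docstring
        target = max(target, undecided_roots, undecided_heads)
    return target

print(next_sample_size(200, 12, 350))  # -> 350, the undecided heads dominate
```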
1269 1269 _chunksize = 4 << 10
1270 1270
1271 1271
1272 1272 @command(
1273 1273 b'debugdownload',
1274 1274 [
1275 1275 (b'o', b'output', b'', _(b'path')),
1276 1276 ],
1277 1277 optionalrepo=True,
1278 1278 )
1279 1279 def debugdownload(ui, repo, url, output=None, **opts):
1280 1280 """download a resource using Mercurial logic and config"""
1281 1281 fh = urlmod.open(ui, url, output)
1282 1282
1283 1283 dest = ui
1284 1284 if output:
1285 1285 dest = open(output, b"wb", _chunksize)
1286 1286 try:
1287 1287 data = fh.read(_chunksize)
1288 1288 while data:
1289 1289 dest.write(data)
1290 1290 data = fh.read(_chunksize)
1291 1291 finally:
1292 1292 if output:
1293 1293 dest.close()
1294 1294
1295 1295
1296 1296 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1297 1297 def debugextensions(ui, repo, **opts):
1298 1298 '''show information about active extensions'''
1299 1299 opts = pycompat.byteskwargs(opts)
1300 1300 exts = extensions.extensions(ui)
1301 1301 hgver = util.version()
1302 1302 fm = ui.formatter(b'debugextensions', opts)
1303 1303 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1304 1304 isinternal = extensions.ismoduleinternal(extmod)
1305 1305 extsource = None
1306 1306
1307 1307 if util.safehasattr(extmod, '__file__'):
1308 1308 extsource = pycompat.fsencode(extmod.__file__)
1309 1309 elif getattr(sys, 'oxidized', False):
1310 1310 extsource = pycompat.sysexecutable
1311 1311 if isinternal:
1312 1312 exttestedwith = [] # never expose magic string to users
1313 1313 else:
1314 1314 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1315 1315 extbuglink = getattr(extmod, 'buglink', None)
1316 1316
1317 1317 fm.startitem()
1318 1318
1319 1319 if ui.quiet or ui.verbose:
1320 1320 fm.write(b'name', b'%s\n', extname)
1321 1321 else:
1322 1322 fm.write(b'name', b'%s', extname)
1323 1323 if isinternal or hgver in exttestedwith:
1324 1324 fm.plain(b'\n')
1325 1325 elif not exttestedwith:
1326 1326 fm.plain(_(b' (untested!)\n'))
1327 1327 else:
1328 1328 lasttestedversion = exttestedwith[-1]
1329 1329 fm.plain(b' (%s!)\n' % lasttestedversion)
1330 1330
1331 1331 fm.condwrite(
1332 1332 ui.verbose and extsource,
1333 1333 b'source',
1334 1334 _(b' location: %s\n'),
1335 1335 extsource or b"",
1336 1336 )
1337 1337
1338 1338 if ui.verbose:
1339 1339 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1340 1340 fm.data(bundled=isinternal)
1341 1341
1342 1342 fm.condwrite(
1343 1343 ui.verbose and exttestedwith,
1344 1344 b'testedwith',
1345 1345 _(b' tested with: %s\n'),
1346 1346 fm.formatlist(exttestedwith, name=b'ver'),
1347 1347 )
1348 1348
1349 1349 fm.condwrite(
1350 1350 ui.verbose and extbuglink,
1351 1351 b'buglink',
1352 1352 _(b' bug reporting: %s\n'),
1353 1353 extbuglink or b"",
1354 1354 )
1355 1355
1356 1356 fm.end()
1357 1357
1358 1358
1359 1359 @command(
1360 1360 b'debugfileset',
1361 1361 [
1362 1362 (
1363 1363 b'r',
1364 1364 b'rev',
1365 1365 b'',
1366 1366 _(b'apply the filespec on this revision'),
1367 1367 _(b'REV'),
1368 1368 ),
1369 1369 (
1370 1370 b'',
1371 1371 b'all-files',
1372 1372 False,
1373 1373 _(b'test files from all revisions and working directory'),
1374 1374 ),
1375 1375 (
1376 1376 b's',
1377 1377 b'show-matcher',
1378 1378 None,
1379 1379 _(b'print internal representation of matcher'),
1380 1380 ),
1381 1381 (
1382 1382 b'p',
1383 1383 b'show-stage',
1384 1384 [],
1385 1385 _(b'print parsed tree at the given stage'),
1386 1386 _(b'NAME'),
1387 1387 ),
1388 1388 ],
1389 1389 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1390 1390 )
1391 1391 def debugfileset(ui, repo, expr, **opts):
1392 1392 '''parse and apply a fileset specification'''
1393 1393 from . import fileset
1394 1394
1395 1395 fileset.symbols # force import of fileset so we have predicates to optimize
1396 1396 opts = pycompat.byteskwargs(opts)
1397 1397 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1398 1398
1399 1399 stages = [
1400 1400 (b'parsed', pycompat.identity),
1401 1401 (b'analyzed', filesetlang.analyze),
1402 1402 (b'optimized', filesetlang.optimize),
1403 1403 ]
1404 1404 stagenames = {n for n, f in stages}
1405 1405
1406 1406 showalways = set()
1407 1407 if ui.verbose and not opts[b'show_stage']:
1408 1408 # show parsed tree by --verbose (deprecated)
1409 1409 showalways.add(b'parsed')
1410 1410 if opts[b'show_stage'] == [b'all']:
1411 1411 showalways.update(stagenames)
1412 1412 else:
1413 1413 for n in opts[b'show_stage']:
1414 1414 if n not in stagenames:
1415 1415 raise error.Abort(_(b'invalid stage name: %s') % n)
1416 1416 showalways.update(opts[b'show_stage'])
1417 1417
1418 1418 tree = filesetlang.parse(expr)
1419 1419 for n, f in stages:
1420 1420 tree = f(tree)
1421 1421 if n in showalways:
1422 1422 if opts[b'show_stage'] or n != b'parsed':
1423 1423 ui.write(b"* %s:\n" % n)
1424 1424 ui.write(filesetlang.prettyformat(tree), b"\n")
1425 1425
1426 1426 files = set()
1427 1427 if opts[b'all_files']:
1428 1428 for r in repo:
1429 1429 c = repo[r]
1430 1430 files.update(c.files())
1431 1431 files.update(c.substate)
1432 1432 if opts[b'all_files'] or ctx.rev() is None:
1433 1433 wctx = repo[None]
1434 1434 files.update(
1435 1435 repo.dirstate.walk(
1436 1436 scmutil.matchall(repo),
1437 1437 subrepos=list(wctx.substate),
1438 1438 unknown=True,
1439 1439 ignored=True,
1440 1440 )
1441 1441 )
1442 1442 files.update(wctx.substate)
1443 1443 else:
1444 1444 files.update(ctx.files())
1445 1445 files.update(ctx.substate)
1446 1446
1447 1447 m = ctx.matchfileset(repo.getcwd(), expr)
1448 1448 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1449 1449 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1450 1450 for f in sorted(files):
1451 1451 if not m(f):
1452 1452 continue
1453 1453 ui.write(b"%s\n" % f)
1454 1454
1455 1455
1456 1456 @command(
1457 1457 b"debug-repair-issue6528",
1458 1458 [
1459 1459 (
1460 1460 b'',
1461 1461 b'to-report',
1462 1462 b'',
1463 1463 _(b'build a report of affected revisions to this file'),
1464 1464 _(b'FILE'),
1465 1465 ),
1466 1466 (
1467 1467 b'',
1468 1468 b'from-report',
1469 1469 b'',
1470 1470 _(b'repair revisions listed in this report file'),
1471 1471 _(b'FILE'),
1472 1472 ),
1473 (
1474 b'',
1475 b'paranoid',
1476 False,
1477 _(b'check that both detection methods do the same thing'),
1478 ),
1473 1479 ]
1474 1480 + cmdutil.dryrunopts,
1475 1481 )
1476 1482 def debug_repair_issue6528(ui, repo, **opts):
1477 1483 """find affected revisions and repair them. See issue6528 for more details.
1478 1484
1479 1485 The `--to-report` and `--from-report` flags allow you to cache and reuse the
1480 1486 computation of affected revisions for a given repository across clones.
1481 1487 The report format is line-based (with empty lines ignored):
1482 1488
1483 1489 ```
1484 1490 <ascii-hex of the affected revision>,... <unencoded filelog index filename>
1485 1491 ```
1486 1492
1487 1493 There can be multiple broken revisions per filelog, they are separated by
1488 1494 a comma with no spaces. The only space is between the revision(s) and the
1489 1495 filename.
1490 1496
1491 1497 Note that this does *not* mean that this repairs future affected revisions;
1492 1498 that needs a separate fix at the exchange level that hasn't been written yet
1493 1499 (as of 5.9rc0).
1500
1501 There is a `--paranoid` flag to test that the fast implementation is correct
1502 by checking it against the slow implementation. Since this matter is quite
1503 urgent and testing every edge-case is probably quite costly, we use this
1504 method to test on large repositories as a fuzzing method of sorts.
1494 1505 """
1495 1506 cmdutil.check_incompatible_arguments(
1496 1507 opts, 'to_report', ['from_report', 'dry_run']
1497 1508 )
1498 1509 dry_run = opts.get('dry_run')
1499 1510 to_report = opts.get('to_report')
1500 1511 from_report = opts.get('from_report')
1512 paranoid = opts.get('paranoid')
1501 1513 # TODO maybe add filelog pattern and revision pattern parameters to help
1502 1514 # narrow down the search for users that know what they're looking for?
1503 1515
1504 1516 if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
1505 1517 msg = b"can only repair revlogv1 repositories, v2 is not affected"
1506 1518 raise error.Abort(_(msg))
1507 1519
1508 1520 rewrite.repair_issue6528(
1509 ui, repo, dry_run=dry_run, to_report=to_report, from_report=from_report
1521 ui,
1522 repo,
1523 dry_run=dry_run,
1524 to_report=to_report,
1525 from_report=from_report,
1526 paranoid=paranoid,
1510 1527 )
1511 1528
1512 1529
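For reference, a hypothetical report file in the line-based `--to-report`/`--from-report` format described in the docstring; the hashes and filename below are made up, and the snippet just shows how one line decomposes:

```python
# comma-separated affected revision hashes, one space, then the unencoded
# filelog index filename; empty lines in a report are ignored
line = (
    b'2a80b1c0e4f7c9c7f0d7f1a3b5c6d7e8f9a0b1c2,'
    b'9f1e2d3c4b5a69788796a5b4c3d2e1f001122334'
    b' data/some/file.txt.i'
)
revs, filename = line.split(b' ', 1)
print(revs.split(b','), filename)
```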
1513 1530 @command(b'debugformat', [] + cmdutil.formatteropts)
1514 1531 def debugformat(ui, repo, **opts):
1515 1532 """display format information about the current repository
1516 1533
1517 1534 Use --verbose to get extra information about current config value and
1518 1535 Mercurial default."""
1519 1536 opts = pycompat.byteskwargs(opts)
1520 1537 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1521 1538 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1522 1539
1523 1540 def makeformatname(name):
1524 1541 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1525 1542
1526 1543 fm = ui.formatter(b'debugformat', opts)
1527 1544 if fm.isplain():
1528 1545
1529 1546 def formatvalue(value):
1530 1547 if util.safehasattr(value, b'startswith'):
1531 1548 return value
1532 1549 if value:
1533 1550 return b'yes'
1534 1551 else:
1535 1552 return b'no'
1536 1553
1537 1554 else:
1538 1555 formatvalue = pycompat.identity
1539 1556
1540 1557 fm.plain(b'format-variant')
1541 1558 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1542 1559 fm.plain(b' repo')
1543 1560 if ui.verbose:
1544 1561 fm.plain(b' config default')
1545 1562 fm.plain(b'\n')
1546 1563 for fv in upgrade.allformatvariant:
1547 1564 fm.startitem()
1548 1565 repovalue = fv.fromrepo(repo)
1549 1566 configvalue = fv.fromconfig(repo)
1550 1567
1551 1568 if repovalue != configvalue:
1552 1569 namelabel = b'formatvariant.name.mismatchconfig'
1553 1570 repolabel = b'formatvariant.repo.mismatchconfig'
1554 1571 elif repovalue != fv.default:
1555 1572 namelabel = b'formatvariant.name.mismatchdefault'
1556 1573 repolabel = b'formatvariant.repo.mismatchdefault'
1557 1574 else:
1558 1575 namelabel = b'formatvariant.name.uptodate'
1559 1576 repolabel = b'formatvariant.repo.uptodate'
1560 1577
1561 1578 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1562 1579 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1563 1580 if fv.default != configvalue:
1564 1581 configlabel = b'formatvariant.config.special'
1565 1582 else:
1566 1583 configlabel = b'formatvariant.config.default'
1567 1584 fm.condwrite(
1568 1585 ui.verbose,
1569 1586 b'config',
1570 1587 b' %6s',
1571 1588 formatvalue(configvalue),
1572 1589 label=configlabel,
1573 1590 )
1574 1591 fm.condwrite(
1575 1592 ui.verbose,
1576 1593 b'default',
1577 1594 b' %7s',
1578 1595 formatvalue(fv.default),
1579 1596 label=b'formatvariant.default',
1580 1597 )
1581 1598 fm.plain(b'\n')
1582 1599 fm.end()
1583 1600
1584 1601
1585 1602 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1586 1603 def debugfsinfo(ui, path=b"."):
1587 1604 """show information detected about current filesystem"""
1588 1605 ui.writenoi18n(b'path: %s\n' % path)
1589 1606 ui.writenoi18n(
1590 1607 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1591 1608 )
1592 1609 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1593 1610 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1594 1611 ui.writenoi18n(
1595 1612 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1596 1613 )
1597 1614 ui.writenoi18n(
1598 1615 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1599 1616 )
1600 1617 casesensitive = b'(unknown)'
1601 1618 try:
1602 1619 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1603 1620 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1604 1621 except OSError:
1605 1622 pass
1606 1623 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1607 1624
1608 1625
1609 1626 @command(
1610 1627 b'debuggetbundle',
1611 1628 [
1612 1629 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1613 1630 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1614 1631 (
1615 1632 b't',
1616 1633 b'type',
1617 1634 b'bzip2',
1618 1635 _(b'bundle compression type to use'),
1619 1636 _(b'TYPE'),
1620 1637 ),
1621 1638 ],
1622 1639 _(b'REPO FILE [-H|-C ID]...'),
1623 1640 norepo=True,
1624 1641 )
1625 1642 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1626 1643 """retrieves a bundle from a repo
1627 1644
1628 1645 Every ID must be a full-length hex node id string. Saves the bundle to the
1629 1646 given file.
1630 1647 """
1631 1648 opts = pycompat.byteskwargs(opts)
1632 1649 repo = hg.peer(ui, opts, repopath)
1633 1650 if not repo.capable(b'getbundle'):
1634 1651 raise error.Abort(b"getbundle() not supported by target repository")
1635 1652 args = {}
1636 1653 if common:
1637 1654 args['common'] = [bin(s) for s in common]
1638 1655 if head:
1639 1656 args['heads'] = [bin(s) for s in head]
1640 1657 # TODO: get desired bundlecaps from command line.
1641 1658 args['bundlecaps'] = None
1642 1659 bundle = repo.getbundle(b'debug', **args)
1643 1660
1644 1661 bundletype = opts.get(b'type', b'bzip2').lower()
1645 1662 btypes = {
1646 1663 b'none': b'HG10UN',
1647 1664 b'bzip2': b'HG10BZ',
1648 1665 b'gzip': b'HG10GZ',
1649 1666 b'bundle2': b'HG20',
1650 1667 }
1651 1668 bundletype = btypes.get(bundletype)
1652 1669 if bundletype not in bundle2.bundletypes:
1653 1670 raise error.Abort(_(b'unknown bundle type specified with --type'))
1654 1671 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1655 1672
1656 1673
1657 1674 @command(b'debugignore', [], b'[FILE]')
1658 1675 def debugignore(ui, repo, *files, **opts):
1659 1676 """display the combined ignore pattern and information about ignored files
1660 1677
1661 1678 With no argument display the combined ignore pattern.
1662 1679
1663 1680 Given space separated file names, shows if the given file is ignored and
1664 1681 if so, show the ignore rule (file and line number) that matched it.
1665 1682 """
1666 1683 ignore = repo.dirstate._ignore
1667 1684 if not files:
1668 1685 # Show all the patterns
1669 1686 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1670 1687 else:
1671 1688 m = scmutil.match(repo[None], pats=files)
1672 1689 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1673 1690 for f in m.files():
1674 1691 nf = util.normpath(f)
1675 1692 ignored = None
1676 1693 ignoredata = None
1677 1694 if nf != b'.':
1678 1695 if ignore(nf):
1679 1696 ignored = nf
1680 1697 ignoredata = repo.dirstate._ignorefileandline(nf)
1681 1698 else:
1682 1699 for p in pathutil.finddirs(nf):
1683 1700 if ignore(p):
1684 1701 ignored = p
1685 1702 ignoredata = repo.dirstate._ignorefileandline(p)
1686 1703 break
1687 1704 if ignored:
1688 1705 if ignored == nf:
1689 1706 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1690 1707 else:
1691 1708 ui.write(
1692 1709 _(
1693 1710 b"%s is ignored because of "
1694 1711 b"containing directory %s\n"
1695 1712 )
1696 1713 % (uipathfn(f), ignored)
1697 1714 )
1698 1715 ignorefile, lineno, line = ignoredata
1699 1716 ui.write(
1700 1717 _(b"(ignore rule in %s, line %d: '%s')\n")
1701 1718 % (ignorefile, lineno, line)
1702 1719 )
1703 1720 else:
1704 1721 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1705 1722
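# Illustrative session for debugignore above (a sketch: file names, the
# ignore rule and its line number are invented, for a repository whose
# .hgignore contains a '*.pyc' pattern):
#
#   $ hg debugignore mod.pyc README
#   mod.pyc is ignored
#   (ignore rule in .hgignore, line 2: '*.pyc')
#   README is not ignored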
1706 1723
1707 1724 @command(
1708 1725 b'debugindex',
1709 1726 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1710 1727 _(b'-c|-m|FILE'),
1711 1728 )
1712 1729 def debugindex(ui, repo, file_=None, **opts):
1713 1730 """dump index data for a storage primitive"""
1714 1731 opts = pycompat.byteskwargs(opts)
1715 1732 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1716 1733
1717 1734 if ui.debugflag:
1718 1735 shortfn = hex
1719 1736 else:
1720 1737 shortfn = short
1721 1738
1722 1739 idlen = 12 # sane default in case the store is empty
1723 1740 for i in store:
1724 1741 idlen = len(shortfn(store.node(i)))
1725 1742 break
1726 1743
1727 1744 fm = ui.formatter(b'debugindex', opts)
1728 1745 fm.plain(
1729 1746 b' rev linkrev %s %s p2\n'
1730 1747 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1731 1748 )
1732 1749
1733 1750 for rev in store:
1734 1751 node = store.node(rev)
1735 1752 parents = store.parents(node)
1736 1753
1737 1754 fm.startitem()
1738 1755 fm.write(b'rev', b'%6d ', rev)
1739 1756 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1740 1757 fm.write(b'node', b'%s ', shortfn(node))
1741 1758 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1742 1759 fm.write(b'p2', b'%s', shortfn(parents[1]))
1743 1760 fm.plain(b'\n')
1744 1761
1745 1762 fm.end()
1746 1763
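# Illustrative output for debugindex above (a sketch: node hashes are
# invented and column alignment is approximate):
#
#   $ hg debugindex -c
#      rev linkrev nodeid       p1           p2
#        0       0 9117c6561b0b 000000000000 000000000000
#        1       1 79f1a208dfe7 9117c6561b0b 000000000000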
1747 1764
1748 1765 @command(
1749 1766 b'debugindexdot',
1750 1767 cmdutil.debugrevlogopts,
1751 1768 _(b'-c|-m|FILE'),
1752 1769 optionalrepo=True,
1753 1770 )
1754 1771 def debugindexdot(ui, repo, file_=None, **opts):
1755 1772 """dump an index DAG as a graphviz dot file"""
1756 1773 opts = pycompat.byteskwargs(opts)
1757 1774 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1758 1775 ui.writenoi18n(b"digraph G {\n")
1759 1776 for i in r:
1760 1777 node = r.node(i)
1761 1778 pp = r.parents(node)
1762 1779 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1763 1780 if pp[1] != repo.nullid:
1764 1781 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1765 1782 ui.write(b"}\n")
1766 1783
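# Illustrative output for debugindexdot above (a sketch: the topology is
# invented - a linear pair of revisions followed by a merge):
#
#   $ hg debugindexdot -c
#   digraph G {
#       -1 -> 0
#       0 -> 1
#       1 -> 2
#       0 -> 2
#   }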
1767 1784
1768 1785 @command(b'debugindexstats', [])
1769 1786 def debugindexstats(ui, repo):
1770 1787 """show stats related to the changelog index"""
1771 1788 repo.changelog.shortest(repo.nullid, 1)
1772 1789 index = repo.changelog.index
1773 1790 if not util.safehasattr(index, b'stats'):
1774 1791 raise error.Abort(_(b'debugindexstats only works with native code'))
1775 1792 for k, v in sorted(index.stats().items()):
1776 1793 ui.write(b'%s: %d\n' % (k, v))
1777 1794
1778 1795
1779 1796 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1780 1797 def debuginstall(ui, **opts):
1781 1798 """test Mercurial installation
1782 1799
1783 1800 Returns 0 on success.
1784 1801 """
1785 1802 opts = pycompat.byteskwargs(opts)
1786 1803
1787 1804 problems = 0
1788 1805
1789 1806 fm = ui.formatter(b'debuginstall', opts)
1790 1807 fm.startitem()
1791 1808
1792 1809 # encoding might be unknown or wrong. don't translate these messages.
1793 1810 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1794 1811 err = None
1795 1812 try:
1796 1813 codecs.lookup(pycompat.sysstr(encoding.encoding))
1797 1814 except LookupError as inst:
1798 1815 err = stringutil.forcebytestr(inst)
1799 1816 problems += 1
1800 1817 fm.condwrite(
1801 1818 err,
1802 1819 b'encodingerror',
1803 1820 b" %s\n (check that your locale is properly set)\n",
1804 1821 err,
1805 1822 )
1806 1823
1807 1824 # Python
1808 1825 pythonlib = None
1809 1826 if util.safehasattr(os, '__file__'):
1810 1827 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1811 1828 elif getattr(sys, 'oxidized', False):
1812 1829 pythonlib = pycompat.sysexecutable
1813 1830
1814 1831 fm.write(
1815 1832 b'pythonexe',
1816 1833 _(b"checking Python executable (%s)\n"),
1817 1834 pycompat.sysexecutable or _(b"unknown"),
1818 1835 )
1819 1836 fm.write(
1820 1837 b'pythonimplementation',
1821 1838 _(b"checking Python implementation (%s)\n"),
1822 1839 pycompat.sysbytes(platform.python_implementation()),
1823 1840 )
1824 1841 fm.write(
1825 1842 b'pythonver',
1826 1843 _(b"checking Python version (%s)\n"),
1827 1844 (b"%d.%d.%d" % sys.version_info[:3]),
1828 1845 )
1829 1846 fm.write(
1830 1847 b'pythonlib',
1831 1848 _(b"checking Python lib (%s)...\n"),
1832 1849 pythonlib or _(b"unknown"),
1833 1850 )
1834 1851
1835 1852 try:
1836 1853 from . import rustext # pytype: disable=import-error
1837 1854
1838 1855 rustext.__doc__ # trigger lazy import
1839 1856 except ImportError:
1840 1857 rustext = None
1841 1858
1842 1859 security = set(sslutil.supportedprotocols)
1843 1860 if sslutil.hassni:
1844 1861 security.add(b'sni')
1845 1862
1846 1863 fm.write(
1847 1864 b'pythonsecurity',
1848 1865 _(b"checking Python security support (%s)\n"),
1849 1866 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1850 1867 )
1851 1868
1852 1869 # These are warnings, not errors. So don't increment problem count. This
1853 1870 # may change in the future.
1854 1871 if b'tls1.2' not in security:
1855 1872 fm.plain(
1856 1873 _(
1857 1874 b' TLS 1.2 not supported by Python install; '
1858 1875 b'network connections lack modern security\n'
1859 1876 )
1860 1877 )
1861 1878 if b'sni' not in security:
1862 1879 fm.plain(
1863 1880 _(
1864 1881 b' SNI not supported by Python install; may have '
1865 1882 b'connectivity issues with some servers\n'
1866 1883 )
1867 1884 )
1868 1885
1869 1886 fm.plain(
1870 1887 _(
1871 1888 b"checking Rust extensions (%s)\n"
1872 1889 )
1873 1890 % (b'missing' if rustext is None else b'installed'),
1874 1891 )
1875 1892
1876 1893 # TODO print CA cert info
1877 1894
1878 1895 # hg version
1879 1896 hgver = util.version()
1880 1897 fm.write(
1881 1898 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1882 1899 )
1883 1900 fm.write(
1884 1901 b'hgverextra',
1885 1902 _(b"checking Mercurial custom build (%s)\n"),
1886 1903 b'+'.join(hgver.split(b'+')[1:]),
1887 1904 )
1888 1905
1889 1906 # compiled modules
1890 1907 hgmodules = None
1891 1908 if util.safehasattr(sys.modules[__name__], '__file__'):
1892 1909 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1893 1910 elif getattr(sys, 'oxidized', False):
1894 1911 hgmodules = pycompat.sysexecutable
1895 1912
1896 1913 fm.write(
1897 1914 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1898 1915 )
1899 1916 fm.write(
1900 1917 b'hgmodules',
1901 1918 _(b"checking installed modules (%s)...\n"),
1902 1919 hgmodules or _(b"unknown"),
1903 1920 )
1904 1921
1905 1922 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1906 1923 rustext = rustandc # for now, that's the only case
1907 1924 cext = policy.policy in (b'c', b'allow') or rustandc
1908 1925 nopure = cext or rustext
1909 1926 if nopure:
1910 1927 err = None
1911 1928 try:
1912 1929 if cext:
1913 1930 from .cext import ( # pytype: disable=import-error
1914 1931 base85,
1915 1932 bdiff,
1916 1933 mpatch,
1917 1934 osutil,
1918 1935 )
1919 1936
1920 1937 # quiet pyflakes
1921 1938 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1922 1939 if rustext:
1923 1940 from .rustext import ( # pytype: disable=import-error
1924 1941 ancestor,
1925 1942 dirstate,
1926 1943 )
1927 1944
1928 1945 dir(ancestor), dir(dirstate) # quiet pyflakes
1929 1946 except Exception as inst:
1930 1947 err = stringutil.forcebytestr(inst)
1931 1948 problems += 1
1932 1949 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1933 1950
1934 1951 compengines = util.compengines._engines.values()
1935 1952 fm.write(
1936 1953 b'compengines',
1937 1954 _(b'checking registered compression engines (%s)\n'),
1938 1955 fm.formatlist(
1939 1956 sorted(e.name() for e in compengines),
1940 1957 name=b'compengine',
1941 1958 fmt=b'%s',
1942 1959 sep=b', ',
1943 1960 ),
1944 1961 )
1945 1962 fm.write(
1946 1963 b'compenginesavail',
1947 1964 _(b'checking available compression engines (%s)\n'),
1948 1965 fm.formatlist(
1949 1966 sorted(e.name() for e in compengines if e.available()),
1950 1967 name=b'compengine',
1951 1968 fmt=b'%s',
1952 1969 sep=b', ',
1953 1970 ),
1954 1971 )
1955 1972 wirecompengines = compression.compengines.supportedwireengines(
1956 1973 compression.SERVERROLE
1957 1974 )
1958 1975 fm.write(
1959 1976 b'compenginesserver',
1960 1977 _(
1961 1978 b'checking available compression engines '
1962 1979 b'for wire protocol (%s)\n'
1963 1980 ),
1964 1981 fm.formatlist(
1965 1982 [e.name() for e in wirecompengines if e.wireprotosupport()],
1966 1983 name=b'compengine',
1967 1984 fmt=b'%s',
1968 1985 sep=b', ',
1969 1986 ),
1970 1987 )
1971 1988 re2 = b'missing'
1972 1989 if util._re2:
1973 1990 re2 = b'available'
1974 1991 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1975 1992 fm.data(re2=bool(util._re2))
1976 1993
1977 1994 # templates
1978 1995 p = templater.templatedir()
1979 1996 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1980 1997 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1981 1998 if p:
1982 1999 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1983 2000 if m:
1984 2001 # template found, check if it is working
1985 2002 err = None
1986 2003 try:
1987 2004 templater.templater.frommapfile(m)
1988 2005 except Exception as inst:
1989 2006 err = stringutil.forcebytestr(inst)
1990 2007 p = None
1991 2008 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1992 2009 else:
1993 2010 p = None
1994 2011 fm.condwrite(
1995 2012 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1996 2013 )
1997 2014 fm.condwrite(
1998 2015 not m,
1999 2016 b'defaulttemplatenotfound',
2000 2017 _(b" template '%s' not found\n"),
2001 2018 b"default",
2002 2019 )
2003 2020 if not p:
2004 2021 problems += 1
2005 2022 fm.condwrite(
2006 2023 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2007 2024 )
2008 2025
2009 2026 # editor
2010 2027 editor = ui.geteditor()
2011 2028 editor = util.expandpath(editor)
2012 2029 editorbin = procutil.shellsplit(editor)[0]
2013 2030 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2014 2031 cmdpath = procutil.findexe(editorbin)
2015 2032 fm.condwrite(
2016 2033 not cmdpath and editor == b'vi',
2017 2034 b'vinotfound',
2018 2035 _(
2019 2036 b" No commit editor set and can't find %s in PATH\n"
2020 2037 b" (specify a commit editor in your configuration"
2021 2038 b" file)\n"
2022 2039 ),
2023 2040 not cmdpath and editor == b'vi' and editorbin,
2024 2041 )
2025 2042 fm.condwrite(
2026 2043 not cmdpath and editor != b'vi',
2027 2044 b'editornotfound',
2028 2045 _(
2029 2046 b" Can't find editor '%s' in PATH\n"
2030 2047 b" (specify a commit editor in your configuration"
2031 2048 b" file)\n"
2032 2049 ),
2033 2050 not cmdpath and editorbin,
2034 2051 )
2035 2052 if not cmdpath and editor != b'vi':
2036 2053 problems += 1
2037 2054
2038 2055 # check username
2039 2056 username = None
2040 2057 err = None
2041 2058 try:
2042 2059 username = ui.username()
2043 2060 except error.Abort as e:
2044 2061 err = e.message
2045 2062 problems += 1
2046 2063
2047 2064 fm.condwrite(
2048 2065 username, b'username', _(b"checking username (%s)\n"), username
2049 2066 )
2050 2067 fm.condwrite(
2051 2068 err,
2052 2069 b'usernameerror',
2053 2070 _(
2054 2071 b"checking username...\n %s\n"
2055 2072 b" (specify a username in your configuration file)\n"
2056 2073 ),
2057 2074 err,
2058 2075 )
2059 2076
2060 2077 for name, mod in extensions.extensions():
2061 2078 handler = getattr(mod, 'debuginstall', None)
2062 2079 if handler is not None:
2063 2080 problems += handler(ui, fm)
2064 2081
2065 2082 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2066 2083 if not problems:
2067 2084 fm.data(problems=problems)
2068 2085 fm.condwrite(
2069 2086 problems,
2070 2087 b'problems',
2071 2088 _(b"%d problems detected, please check your install!\n"),
2072 2089 problems,
2073 2090 )
2074 2091 fm.end()
2075 2092
2076 2093 return problems
2077 2094
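# Illustrative, abridged output for debuginstall above on a healthy
# installation (a sketch: paths and versions are hypothetical):
#
#   $ hg debuginstall
#   checking encoding (UTF-8)...
#   checking Python executable (/usr/bin/python3)
#   checking Python implementation (CPython)
#   checking Python version (3.9.7)
#   ...
#   checking username (Alice <alice@example.com>)
#   no problems detected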
2078 2095
2079 2096 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
2080 2097 def debugknown(ui, repopath, *ids, **opts):
2081 2098 """test whether node ids are known to a repo
2082 2099
2083 2100 Every ID must be a full-length hex node id string. Returns a list of 0s
2084 2101 and 1s indicating unknown/known.
2085 2102 """
2086 2103 opts = pycompat.byteskwargs(opts)
2087 2104 repo = hg.peer(ui, opts, repopath)
2088 2105 if not repo.capable(b'known'):
2089 2106 raise error.Abort(b"known() not supported by target repository")
2090 2107 flags = repo.known([bin(s) for s in ids])
2091 2108 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
2092 2109
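# Illustrative usage for debugknown above (a sketch: the URL and node
# ids are placeholders; '1' marks the first id as known to the peer,
# '0' marks the second as unknown):
#
#   $ hg debugknown http://example.com/repo \
#       0123456789abcdef0123456789abcdef01234567 \
#       fedcba9876543210fedcba9876543210fedcba98
#   10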
2093 2110
2094 2111 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
2095 2112 def debuglabelcomplete(ui, repo, *args):
2096 2113 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2097 2114 debugnamecomplete(ui, repo, *args)
2098 2115
2099 2116
2100 2117 @command(
2101 2118 b'debuglocks',
2102 2119 [
2103 2120 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
2104 2121 (
2105 2122 b'W',
2106 2123 b'force-free-wlock',
2107 2124 None,
2108 2125 _(b'free the working state lock (DANGEROUS)'),
2109 2126 ),
2110 2127 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
2111 2128 (
2112 2129 b'S',
2113 2130 b'set-wlock',
2114 2131 None,
2115 2132 _(b'set the working state lock until stopped'),
2116 2133 ),
2117 2134 ],
2118 2135 _(b'[OPTION]...'),
2119 2136 )
2120 2137 def debuglocks(ui, repo, **opts):
2121 2138 """show or modify state of locks
2122 2139
2123 2140 By default, this command will show which locks are held. This
2124 2141 includes the user and process holding the lock, the amount of time
2125 2142 the lock has been held, and the machine name where the process is
2126 2143 running if it's not local.
2127 2144
2128 2145 Locks protect the integrity of Mercurial's data, so they should be
2129 2146 treated with care. System crashes or other interruptions may cause
2130 2147 locks to not be properly released, though Mercurial will usually
2131 2148 detect and remove such stale locks automatically.
2132 2149
2133 2150 However, detecting stale locks may not always be possible (for
2134 2151 instance, on a shared filesystem). Removing locks may also be
2135 2152 blocked by filesystem permissions.
2136 2153
2137 2154 Setting a lock will prevent other commands from changing the data.
2138 2155 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2139 2156 The set locks are removed when the command exits.
2140 2157
2141 2158 Returns 0 if no locks are held.
2142 2159
2143 2160 """
2144 2161
2145 2162 if opts.get('force_free_lock'):
2146 2163 repo.svfs.unlink(b'lock')
2147 2164 if opts.get('force_free_wlock'):
2148 2165 repo.vfs.unlink(b'wlock')
2149 2166 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2150 2167 return 0
2151 2168
2152 2169 locks = []
2153 2170 try:
2154 2171 if opts.get('set_wlock'):
2155 2172 try:
2156 2173 locks.append(repo.wlock(False))
2157 2174 except error.LockHeld:
2158 2175 raise error.Abort(_(b'wlock is already held'))
2159 2176 if opts.get('set_lock'):
2160 2177 try:
2161 2178 locks.append(repo.lock(False))
2162 2179 except error.LockHeld:
2163 2180 raise error.Abort(_(b'lock is already held'))
2164 2181 if len(locks):
2165 2182 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2166 2183 return 0
2167 2184 finally:
2168 2185 release(*locks)
2169 2186
2170 2187 now = time.time()
2171 2188 held = 0
2172 2189
2173 2190 def report(vfs, name, method):
2174 2191 # this causes stale locks to get reaped for more accurate reporting
2175 2192 try:
2176 2193 l = method(False)
2177 2194 except error.LockHeld:
2178 2195 l = None
2179 2196
2180 2197 if l:
2181 2198 l.release()
2182 2199 else:
2183 2200 try:
2184 2201 st = vfs.lstat(name)
2185 2202 age = now - st[stat.ST_MTIME]
2186 2203 user = util.username(st.st_uid)
2187 2204 locker = vfs.readlock(name)
2188 2205 if b":" in locker:
2189 2206 host, pid = locker.split(b':')
2190 2207 if host == socket.gethostname():
2191 2208 locker = b'user %s, process %s' % (user or b'None', pid)
2192 2209 else:
2193 2210 locker = b'user %s, process %s, host %s' % (
2194 2211 user or b'None',
2195 2212 pid,
2196 2213 host,
2197 2214 )
2198 2215 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2199 2216 return 1
2200 2217 except OSError as e:
2201 2218 if e.errno != errno.ENOENT:
2202 2219 raise
2203 2220
2204 2221 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2205 2222 return 0
2206 2223
2207 2224 held += report(repo.svfs, b"lock", repo.lock)
2208 2225 held += report(repo.vfs, b"wlock", repo.wlock)
2209 2226
2210 2227 return held
2211 2228
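# Illustrative output for debuglocks above while another local process
# holds the store lock (a sketch: user, pid and age are invented):
#
#   $ hg debuglocks
#   lock:  user alice, process 4217 (12s)
#   wlock: free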
2212 2229
2213 2230 @command(
2214 2231 b'debugmanifestfulltextcache',
2215 2232 [
2216 2233 (b'', b'clear', False, _(b'clear the cache')),
2217 2234 (
2218 2235 b'a',
2219 2236 b'add',
2220 2237 [],
2221 2238 _(b'add the given manifest nodes to the cache'),
2222 2239 _(b'NODE'),
2223 2240 ),
2224 2241 ],
2225 2242 b'',
2226 2243 )
2227 2244 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2228 2245 """show, clear or amend the contents of the manifest fulltext cache"""
2229 2246
2230 2247 def getcache():
2231 2248 r = repo.manifestlog.getstorage(b'')
2232 2249 try:
2233 2250 return r._fulltextcache
2234 2251 except AttributeError:
2235 2252 msg = _(
2236 2253 b"Current revlog implementation doesn't appear to have a "
2237 2254 b"manifest fulltext cache\n"
2238 2255 )
2239 2256 raise error.Abort(msg)
2240 2257
2241 2258 if opts.get('clear'):
2242 2259 with repo.wlock():
2243 2260 cache = getcache()
2244 2261 cache.clear(clear_persisted_data=True)
2245 2262 return
2246 2263
2247 2264 if add:
2248 2265 with repo.wlock():
2249 2266 m = repo.manifestlog
2250 2267 store = m.getstorage(b'')
2251 2268 for n in add:
2252 2269 try:
2253 2270 manifest = m[store.lookup(n)]
2254 2271 except error.LookupError as e:
2255 2272 raise error.Abort(
2256 2273 bytes(e), hint=b"Check your manifest node id"
2257 2274 )
2258 2275 manifest.read() # stores revision in cache too
2259 2276 return
2260 2277
2261 2278 cache = getcache()
2262 2279 if not len(cache):
2263 2280 ui.write(_(b'cache empty\n'))
2264 2281 else:
2265 2282 ui.write(
2266 2283 _(
2267 2284 b'cache contains %d manifest entries, in order of most to '
2268 2285 b'least recent:\n'
2269 2286 )
2270 2287 % (len(cache),)
2271 2288 )
2272 2289 totalsize = 0
2273 2290 for nodeid in cache:
2274 2291 # Use cache.peek to not update the LRU order
2275 2292 data = cache.peek(nodeid)
2276 2293 size = len(data)
2277 2294 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2278 2295 ui.write(
2279 2296 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2280 2297 )
2281 2298 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2282 2299 ui.write(
2283 2300 _(b'total cache data size %s, on-disk %s\n')
2284 2301 % (util.bytecount(totalsize), util.bytecount(ondisk))
2285 2302 )
2286 2303
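# Illustrative session for debugmanifestfulltextcache above (a sketch:
# the node id and sizes are invented):
#
#   $ hg debugmanifestfulltextcache
#   cache contains 1 manifest entries, in order of most to least recent:
#   id: 1e01206b1d2f72bd55f2a33fa8ccad74144825b7, size 366 bytes
#   total cache data size 390 bytes, on-disk 390 bytes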
2287 2304
2288 2305 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2289 2306 def debugmergestate(ui, repo, *args, **opts):
2290 2307 """print merge state
2291 2308
2292 2309 Use --verbose to print out information about whether v1 or v2 merge state
2293 2310 was chosen."""
2294 2311
2295 2312 if ui.verbose:
2296 2313 ms = mergestatemod.mergestate(repo)
2297 2314
2298 2315 # sort so that reasonable information is on top
2299 2316 v1records = ms._readrecordsv1()
2300 2317 v2records = ms._readrecordsv2()
2301 2318
2302 2319 if not v1records and not v2records:
2303 2320 pass
2304 2321 elif not v2records:
2305 2322 ui.writenoi18n(b'no version 2 merge state\n')
2306 2323 elif ms._v1v2match(v1records, v2records):
2307 2324 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2308 2325 else:
2309 2326 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2310 2327
2311 2328 opts = pycompat.byteskwargs(opts)
2312 2329 if not opts[b'template']:
2313 2330 opts[b'template'] = (
2314 2331 b'{if(commits, "", "no merge state found\n")}'
2315 2332 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2316 2333 b'{files % "file: {path} (state \\"{state}\\")\n'
2317 2334 b'{if(local_path, "'
2318 2335 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2319 2336 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2320 2337 b' other path: {other_path} (node {other_node})\n'
2321 2338 b'")}'
2322 2339 b'{if(rename_side, "'
2323 2340 b' rename side: {rename_side}\n'
2324 2341 b' renamed path: {renamed_path}\n'
2325 2342 b'")}'
2326 2343 b'{extras % " extra: {key} = {value}\n"}'
2327 2344 b'"}'
2328 2345 b'{extras % "extra: {file} ({key} = {value})\n"}'
2329 2346 )
2330 2347
2331 2348 ms = mergestatemod.mergestate.read(repo)
2332 2349
2333 2350 fm = ui.formatter(b'debugmergestate', opts)
2334 2351 fm.startitem()
2335 2352
2336 2353 fm_commits = fm.nested(b'commits')
2337 2354 if ms.active():
2338 2355 for name, node, label_index in (
2339 2356 (b'local', ms.local, 0),
2340 2357 (b'other', ms.other, 1),
2341 2358 ):
2342 2359 fm_commits.startitem()
2343 2360 fm_commits.data(name=name)
2344 2361 fm_commits.data(node=hex(node))
2345 2362 if ms._labels and len(ms._labels) > label_index:
2346 2363 fm_commits.data(label=ms._labels[label_index])
2347 2364 fm_commits.end()
2348 2365
2349 2366 fm_files = fm.nested(b'files')
2350 2367 if ms.active():
2351 2368 for f in ms:
2352 2369 fm_files.startitem()
2353 2370 fm_files.data(path=f)
2354 2371 state = ms._state[f]
2355 2372 fm_files.data(state=state[0])
2356 2373 if state[0] in (
2357 2374 mergestatemod.MERGE_RECORD_UNRESOLVED,
2358 2375 mergestatemod.MERGE_RECORD_RESOLVED,
2359 2376 ):
2360 2377 fm_files.data(local_key=state[1])
2361 2378 fm_files.data(local_path=state[2])
2362 2379 fm_files.data(ancestor_path=state[3])
2363 2380 fm_files.data(ancestor_node=state[4])
2364 2381 fm_files.data(other_path=state[5])
2365 2382 fm_files.data(other_node=state[6])
2366 2383 fm_files.data(local_flags=state[7])
2367 2384 elif state[0] in (
2368 2385 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2369 2386 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2370 2387 ):
2371 2388 fm_files.data(renamed_path=state[1])
2372 2389 fm_files.data(rename_side=state[2])
2373 2390 fm_extras = fm_files.nested(b'extras')
2374 2391 for k, v in sorted(ms.extras(f).items()):
2375 2392 fm_extras.startitem()
2376 2393 fm_extras.data(key=k)
2377 2394 fm_extras.data(value=v)
2378 2395 fm_extras.end()
2379 2396
2380 2397 fm_files.end()
2381 2398
2382 2399 fm_extras = fm.nested(b'extras')
2383 2400 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2384 2401 if f in ms:
2385 2402 # If the file is in the mergestate, we have already processed its extras
2386 2403 continue
2387 2404 for k, v in pycompat.iteritems(d):
2388 2405 fm_extras.startitem()
2389 2406 fm_extras.data(file=f)
2390 2407 fm_extras.data(key=k)
2391 2408 fm_extras.data(value=v)
2392 2409 fm_extras.end()
2393 2410
2394 2411 fm.end()
2395 2412
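# Illustrative default-template output for debugmergestate above during
# an unresolved merge (a sketch: paths, hashes and node ids are invented,
# and the labels depend on how the merge was started):
#
#   $ hg debugmergestate
#   local (working copy): 1111111111222222222233333333334444444444
#   other (merge rev): 5555555555666666666677777777778888888888
#   file: a.txt (state "u")
#    local path: a.txt (hash 9999999999aaaaaaaaaabbbbbbbbbbcccccccccc, flags "")
#    ancestor path: a.txt (node ddddddddddeeeeeeeeeeffffffffff0000000000)
#    other path: a.txt (node 1234123412341234123412341234123412341234)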
2396 2413
2397 2414 @command(b'debugnamecomplete', [], _(b'NAME...'))
2398 2415 def debugnamecomplete(ui, repo, *args):
2399 2416 '''complete "names" - tags, open branch names, bookmark names'''
2400 2417
2401 2418 names = set()
2402 2419 # since we previously only listed open branches, we will handle that
2403 2420 # specially (after this for loop)
2404 2421 for name, ns in pycompat.iteritems(repo.names):
2405 2422 if name != b'branches':
2406 2423 names.update(ns.listnames(repo))
2407 2424 names.update(
2408 2425 tag
2409 2426 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2410 2427 if not closed
2411 2428 )
2412 2429 completions = set()
2413 2430 if not args:
2414 2431 args = [b'']
2415 2432 for a in args:
2416 2433 completions.update(n for n in names if n.startswith(a))
2417 2434 ui.write(b'\n'.join(sorted(completions)))
2418 2435 ui.write(b'\n')
2419 2436
2420 2437
2421 2438 @command(
2422 2439 b'debugnodemap',
2423 2440 [
2424 2441 (
2425 2442 b'',
2426 2443 b'dump-new',
2427 2444 False,
2428 2445 _(b'write a (new) persistent binary nodemap on stdout'),
2429 2446 ),
2430 2447 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2431 2448 (
2432 2449 b'',
2433 2450 b'check',
2434 2451 False,
2435 2452 _(b'check that the data on disk are correct.'),
2436 2453 ),
2437 2454 (
2438 2455 b'',
2439 2456 b'metadata',
2440 2457 False,
2441 2458 _(b'display the on-disk metadata for the nodemap'),
2442 2459 ),
2443 2460 ],
2444 2461 )
2445 2462 def debugnodemap(ui, repo, **opts):
2446 2463 """write and inspect on disk nodemap"""
2447 2464 if opts['dump_new']:
2448 2465 unfi = repo.unfiltered()
2449 2466 cl = unfi.changelog
2450 2467 if util.safehasattr(cl.index, "nodemap_data_all"):
2451 2468 data = cl.index.nodemap_data_all()
2452 2469 else:
2453 2470 data = nodemap.persistent_data(cl.index)
2454 2471 ui.write(data)
2455 2472 elif opts['dump_disk']:
2456 2473 unfi = repo.unfiltered()
2457 2474 cl = unfi.changelog
2458 2475 nm_data = nodemap.persisted_data(cl)
2459 2476 if nm_data is not None:
2460 2477 docket, data = nm_data
2461 2478 ui.write(data[:])
2462 2479 elif opts['check']:
2463 2480 unfi = repo.unfiltered()
2464 2481 cl = unfi.changelog
2465 2482 nm_data = nodemap.persisted_data(cl)
2466 2483 if nm_data is not None:
2467 2484 docket, data = nm_data
2468 2485 return nodemap.check_data(ui, cl.index, data)
2469 2486 elif opts['metadata']:
2470 2487 unfi = repo.unfiltered()
2471 2488 cl = unfi.changelog
2472 2489 nm_data = nodemap.persisted_data(cl)
2473 2490 if nm_data is not None:
2474 2491 docket, data = nm_data
2475 2492 ui.write((b"uid: %s\n") % docket.uid)
2476 2493 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2477 2494 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2478 2495 ui.write((b"data-length: %d\n") % docket.data_length)
2479 2496 ui.write((b"data-unused: %d\n") % docket.data_unused)
2480 2497 unused_perc = docket.data_unused * 100.0 / docket.data_length
2481 2498 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2482 2499
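# Illustrative output for debugnodemap above with --metadata (a sketch:
# the uid and counters are invented; the percentage is data-unused
# relative to data-length):
#
#   $ hg debugnodemap --metadata
#   uid: b71dbd77
#   tip-rev: 5004
#   tip-node: 2a8952a8652476fed33edd81d4d8b4b8a6d0dd10
#   data-length: 121088
#   data-unused: 256
#   data-unused: 0.211%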
2483 2500
2484 2501 @command(
2485 2502 b'debugobsolete',
2486 2503 [
2487 2504 (b'', b'flags', 0, _(b'markers flag')),
2488 2505 (
2489 2506 b'',
2490 2507 b'record-parents',
2491 2508 False,
2492 2509 _(b'record parent information for the precursor'),
2493 2510 ),
2494 2511 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2495 2512 (
2496 2513 b'',
2497 2514 b'exclusive',
2498 2515 False,
2499 2516 _(b'restrict display to markers only relevant to REV'),
2500 2517 ),
2501 2518 (b'', b'index', False, _(b'display index of the marker')),
2502 2519 (b'', b'delete', [], _(b'delete markers specified by indices')),
2503 2520 ]
2504 2521 + cmdutil.commitopts2
2505 2522 + cmdutil.formatteropts,
2506 2523 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2507 2524 )
2508 2525 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2509 2526 """create arbitrary obsolete marker
2510 2527
2511 2528 With no arguments, displays the list of obsolescence markers."""
2512 2529
2513 2530 opts = pycompat.byteskwargs(opts)
2514 2531
2515 2532 def parsenodeid(s):
2516 2533 try:
2517 2534 # We do not use revsingle/revrange functions here to accept
2518 2535 # arbitrary node identifiers, possibly not present in the
2519 2536 # local repository.
2520 2537 n = bin(s)
2521 2538 if len(n) != repo.nodeconstants.nodelen:
2522 2539 raise TypeError()
2523 2540 return n
2524 2541 except TypeError:
2525 2542 raise error.InputError(
2526 2543 b'changeset references must be full hexadecimal '
2527 2544 b'node identifiers'
2528 2545 )
2529 2546
2530 2547 if opts.get(b'delete'):
2531 2548 indices = []
2532 2549 for v in opts.get(b'delete'):
2533 2550 try:
2534 2551 indices.append(int(v))
2535 2552 except ValueError:
2536 2553 raise error.InputError(
2537 2554 _(b'invalid index value: %r') % v,
2538 2555 hint=_(b'use integers for indices'),
2539 2556 )
2540 2557
2541 2558 if repo.currenttransaction():
2542 2559 raise error.Abort(
2543 2560 _(b'cannot delete obsmarkers in the middle of a transaction.')
2544 2561 )
2545 2562
2546 2563 with repo.lock():
2547 2564 n = repair.deleteobsmarkers(repo.obsstore, indices)
2548 2565 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2549 2566
2550 2567 return
2551 2568
2552 2569 if precursor is not None:
2553 2570 if opts[b'rev']:
2554 2571 raise error.InputError(
2555 2572 b'cannot select revision when creating marker'
2556 2573 )
2557 2574 metadata = {}
2558 2575 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2559 2576 succs = tuple(parsenodeid(succ) for succ in successors)
2560 2577 l = repo.lock()
2561 2578 try:
2562 2579 tr = repo.transaction(b'debugobsolete')
2563 2580 try:
2564 2581 date = opts.get(b'date')
2565 2582 if date:
2566 2583 date = dateutil.parsedate(date)
2567 2584 else:
2568 2585 date = None
2569 2586 prec = parsenodeid(precursor)
2570 2587 parents = None
2571 2588 if opts[b'record_parents']:
2572 2589 if prec not in repo.unfiltered():
2573 2590 raise error.Abort(
2574 2591 b'cannot use --record-parents on '
2575 2592 b'unknown changesets'
2576 2593 )
2577 2594 parents = repo.unfiltered()[prec].parents()
2578 2595 parents = tuple(p.node() for p in parents)
2579 2596 repo.obsstore.create(
2580 2597 tr,
2581 2598 prec,
2582 2599 succs,
2583 2600 opts[b'flags'],
2584 2601 parents=parents,
2585 2602 date=date,
2586 2603 metadata=metadata,
2587 2604 ui=ui,
2588 2605 )
2589 2606 tr.close()
2590 2607 except ValueError as exc:
2591 2608 raise error.Abort(
2592 2609 _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
2593 2610 )
2594 2611 finally:
2595 2612 tr.release()
2596 2613 finally:
2597 2614 l.release()
2598 2615 else:
2599 2616 if opts[b'rev']:
2600 2617 revs = scmutil.revrange(repo, opts[b'rev'])
2601 2618 nodes = [repo[r].node() for r in revs]
2602 2619 markers = list(
2603 2620 obsutil.getmarkers(
2604 2621 repo, nodes=nodes, exclusive=opts[b'exclusive']
2605 2622 )
2606 2623 )
2607 2624 markers.sort(key=lambda x: x._data)
2608 2625 else:
2609 2626 markers = obsutil.getmarkers(repo)
2610 2627
2611 2628 markerstoiter = markers
2612 2629 isrelevant = lambda m: True
2613 2630 if opts.get(b'rev') and opts.get(b'index'):
2614 2631 markerstoiter = obsutil.getmarkers(repo)
2615 2632 markerset = set(markers)
2616 2633 isrelevant = lambda m: m in markerset
2617 2634
2618 2635 fm = ui.formatter(b'debugobsolete', opts)
2619 2636 for i, m in enumerate(markerstoiter):
2620 2637 if not isrelevant(m):
2621 2638 # a marker can be irrelevant when we're iterating over a set
2622 2639 # of markers (markerstoiter) which is bigger than the set
2623 2640 # of markers we want to display (markers). This can happen
2624 2641 # if both the --index and --rev options are provided, since
2625 2642 # we then need to iterate over all of the markers to get the
2626 2643 # correct indices, but only display the ones that are
2627 2644 # relevant to the --rev value
2628 2645 continue
2629 2646 fm.startitem()
2630 2647 ind = i if opts.get(b'index') else None
2631 2648 cmdutil.showmarker(fm, m, index=ind)
2632 2649 fm.end()
2633 2650
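# Illustrative session for debugobsolete above (a sketch: node ids are
# placeholders and the marker line rendered by cmdutil.showmarker is
# approximated):
#
#   $ hg debugobsolete 1111111111222222222233333333334444444444 \
#       5555555555666666666677777777778888888888
#   $ hg debugobsolete
#   1111111111222222222233333333334444444444 5555555555666666666677777777778888888888 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'alice'}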
2634 2651
2635 2652 @command(
2636 2653 b'debugp1copies',
2637 2654 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2638 2655 _(b'[-r REV]'),
2639 2656 )
2640 2657 def debugp1copies(ui, repo, **opts):
2641 2658 """dump copy information compared to p1"""
2642 2659
2643 2660 opts = pycompat.byteskwargs(opts)
2644 2661 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2645 2662 for dst, src in ctx.p1copies().items():
2646 2663 ui.write(b'%s -> %s\n' % (src, dst))
2647 2664
2648 2665
2649 2666 @command(
2650 2667 b'debugp2copies',
2651 2668 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2652 2669 _(b'[-r REV]'),
2653 2670 )
2654 2671 def debugp2copies(ui, repo, **opts):
2655 2672 """dump copy information compared to p2"""
2656 2673
2657 2674 opts = pycompat.byteskwargs(opts)
2658 2675 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2659 2676 for dst, src in ctx.p2copies().items():
2660 2677 ui.write(b'%s -> %s\n' % (src, dst))
2661 2678
2662 2679
2663 2680 @command(
2664 2681 b'debugpathcomplete',
2665 2682 [
2666 2683 (b'f', b'full', None, _(b'complete an entire path')),
2667 2684 (b'n', b'normal', None, _(b'show only normal files')),
2668 2685 (b'a', b'added', None, _(b'show only added files')),
2669 2686 (b'r', b'removed', None, _(b'show only removed files')),
2670 2687 ],
2671 2688 _(b'FILESPEC...'),
2672 2689 )
2673 2690 def debugpathcomplete(ui, repo, *specs, **opts):
2674 2691 """complete part or all of a tracked path
2675 2692
2676 2693 This command supports shells that offer path name completion. It
2677 2694 currently completes only files already known to the dirstate.
2678 2695
2679 2696 Completion extends only to the next path segment unless
2680 2697 --full is specified, in which case entire paths are used."""
2681 2698
2682 2699 def complete(path, acceptable):
2683 2700 dirstate = repo.dirstate
2684 2701 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2685 2702 rootdir = repo.root + pycompat.ossep
2686 2703 if spec != repo.root and not spec.startswith(rootdir):
2687 2704 return [], []
2688 2705 if os.path.isdir(spec):
2689 2706 spec += b'/'
2690 2707 spec = spec[len(rootdir) :]
2691 2708 fixpaths = pycompat.ossep != b'/'
2692 2709 if fixpaths:
2693 2710 spec = spec.replace(pycompat.ossep, b'/')
2694 2711 speclen = len(spec)
2695 2712 fullpaths = opts['full']
2696 2713 files, dirs = set(), set()
2697 2714 adddir, addfile = dirs.add, files.add
2698 2715 for f, st in pycompat.iteritems(dirstate):
2699 2716 if f.startswith(spec) and st.state in acceptable:
2700 2717 if fixpaths:
2701 2718 f = f.replace(b'/', pycompat.ossep)
2702 2719 if fullpaths:
2703 2720 addfile(f)
2704 2721 continue
2705 2722 s = f.find(pycompat.ossep, speclen)
2706 2723 if s >= 0:
2707 2724 adddir(f[:s])
2708 2725 else:
2709 2726 addfile(f)
2710 2727 return files, dirs
2711 2728
2712 2729 acceptable = b''
2713 2730 if opts['normal']:
2714 2731 acceptable += b'nm'
2715 2732 if opts['added']:
2716 2733 acceptable += b'a'
2717 2734 if opts['removed']:
2718 2735 acceptable += b'r'
2719 2736 cwd = repo.getcwd()
2720 2737 if not specs:
2721 2738 specs = [b'.']
2722 2739
2723 2740 files, dirs = set(), set()
2724 2741 for spec in specs:
2725 2742 f, d = complete(spec, acceptable or b'nmar')
2726 2743 files.update(f)
2727 2744 dirs.update(d)
2728 2745 files.update(dirs)
2729 2746 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2730 2747 ui.write(b'\n')
2731 2748
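# Illustrative session for debugpathcomplete above (a sketch: the
# tracked files are invented; without --full, completion stops at the
# next path segment):
#
#   $ hg debugpathcomplete lib/u
#   lib/util
#   $ hg debugpathcomplete -f lib/u
#   lib/util/compression.py
#   lib/util/hashes.py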
2732 2749
2733 2750 @command(
2734 2751 b'debugpathcopies',
2735 2752 cmdutil.walkopts,
2736 2753 b'hg debugpathcopies REV1 REV2 [FILE]',
2737 2754 inferrepo=True,
2738 2755 )
2739 2756 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2740 2757 """show copies between two revisions"""
2741 2758 ctx1 = scmutil.revsingle(repo, rev1)
2742 2759 ctx2 = scmutil.revsingle(repo, rev2)
2743 2760 m = scmutil.match(ctx1, pats, opts)
2744 2761 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2745 2762 ui.write(b'%s -> %s\n' % (src, dst))
2746 2763
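# Illustrative output for debugpathcopies above (a sketch: the revisions
# and the copy record are invented; each line reads "source -> destination"):
#
#   $ hg debugpathcopies 10 11
#   old/name.c -> new/name.c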
2747 2764
2748 2765 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2749 2766 def debugpeer(ui, path):
2750 2767 """establish a connection to a peer repository"""
2751 2768 # Always enable peer request logging. Requires --debug to display
2752 2769 # though.
2753 2770 overrides = {
2754 2771 (b'devel', b'debug.peer-request'): True,
2755 2772 }
2756 2773
2757 2774 with ui.configoverride(overrides):
2758 2775 peer = hg.peer(ui, {}, path)
2759 2776
2760 2777 try:
2761 2778 local = peer.local() is not None
2762 2779 canpush = peer.canpush()
2763 2780
2764 2781 ui.write(_(b'url: %s\n') % peer.url())
2765 2782 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2766 2783 ui.write(
2767 2784 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2768 2785 )
2769 2786 finally:
2770 2787 peer.close()
2771 2788
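# Illustrative output for debugpeer above against a hypothetical remote
# peer (a sketch: the URL is a placeholder):
#
#   $ hg debugpeer ssh://user@example.com/repo
#   url: ssh://user@example.com/repo
#   local: no
#   pushable: yes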
2772 2789
2773 2790 @command(
2774 2791 b'debugpickmergetool',
2775 2792 [
2776 2793 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2777 2794 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2778 2795 ]
2779 2796 + cmdutil.walkopts
2780 2797 + cmdutil.mergetoolopts,
2781 2798 _(b'[PATTERN]...'),
2782 2799 inferrepo=True,
2783 2800 )
2784 2801 def debugpickmergetool(ui, repo, *pats, **opts):
2785 2802 """examine which merge tool is chosen for specified file
2786 2803
2787 2804 As described in :hg:`help merge-tools`, Mercurial examines the
2788 2805 configurations below, in this order, to decide which merge tool
2789 2806 is chosen for a specified file.
2790 2807
2791 2808 1. ``--tool`` option
2792 2809 2. ``HGMERGE`` environment variable
2793 2810 3. configurations in ``merge-patterns`` section
2794 2811 4. configuration of ``ui.merge``
2795 2812 5. configurations in ``merge-tools`` section
2796 2813 6. ``hgmerge`` tool (for historical reason only)
2797 2814 7. default tool for fallback (``:merge`` or ``:prompt``)
2798 2815
2799 2816 This command writes out the examination result in the style below::
2800 2817
2801 2818 FILE = MERGETOOL
2802 2819
2803 2820 By default, all files known in the first parent context of the
2804 2821 working directory are examined. Use file patterns and/or -I/-X
2805 2822 options to limit target files. -r/--rev is also useful to examine
2806 2823 files in another context without actually updating to it.
2807 2824
2808 2825 With --debug, this command also shows warning messages while
2809 2826 matching against ``merge-patterns`` and so on. It is recommended
2810 2827 to use this option with explicit file patterns and/or -I/-X
2811 2828 options, because this option increases the amount of output per
2812 2829 file according to the configuration in hgrc.
2813 2830
2814 2831 With -v/--verbose, this command first shows the configurations
2815 2832 below (only those that are specified).
2816 2833
2817 2834 - ``--tool`` option
2818 2835 - ``HGMERGE`` environment variable
2819 2836 - configuration of ``ui.merge``
2820 2837
2821 2838 If a merge tool is chosen before matching against
2822 2839 ``merge-patterns``, this command can't show any helpful
2823 2840 information, even with --debug. In such a case, the information
2824 2841 above is useful for knowing why a merge tool was chosen.
2825 2842 """
2826 2843 opts = pycompat.byteskwargs(opts)
2827 2844 overrides = {}
2828 2845 if opts[b'tool']:
2829 2846 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2830 2847 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2831 2848
2832 2849 with ui.configoverride(overrides, b'debugmergepatterns'):
2833 2850 hgmerge = encoding.environ.get(b"HGMERGE")
2834 2851 if hgmerge is not None:
2835 2852 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2836 2853 uimerge = ui.config(b"ui", b"merge")
2837 2854 if uimerge:
2838 2855 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2839 2856
2840 2857 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2841 2858 m = scmutil.match(ctx, pats, opts)
2842 2859 changedelete = opts[b'changedelete']
2843 2860 for path in ctx.walk(m):
2844 2861 fctx = ctx[path]
2845 2862 with ui.silent(
2846 2863 error=True
2847 2864 ) if not ui.debugflag else util.nullcontextmanager():
2848 2865 tool, toolpath = filemerge._picktool(
2849 2866 repo,
2850 2867 ui,
2851 2868 path,
2852 2869 fctx.isbinary(),
2853 2870 b'l' in fctx.flags(),
2854 2871 changedelete,
2855 2872 )
2856 2873 ui.write(b'%s = %s\n' % (path, tool))
2857 2874
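# Illustrative output for debugpickmergetool above (a sketch: the file
# names and the locally configured tools, e.g. a merge-patterns rule
# mapping **.c to kdiff3, are invented):
#
#   $ hg debugpickmergetool
#   src/main.c = kdiff3
#   docs/guide.txt = :merge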
2858 2875
2859 2876 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2860 2877 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2861 2878 """access the pushkey key/value protocol
2862 2879
2863 2880 With two args, list the keys in the given namespace.
2864 2881
2865 2882 With five args, set a key to new if it currently is set to old.
2866 2883 Reports success or failure.
2867 2884 """
2868 2885
2869 2886 target = hg.peer(ui, {}, repopath)
2870 2887 try:
2871 2888 if keyinfo:
2872 2889 key, old, new = keyinfo
2873 2890 with target.commandexecutor() as e:
2874 2891 r = e.callcommand(
2875 2892 b'pushkey',
2876 2893 {
2877 2894 b'namespace': namespace,
2878 2895 b'key': key,
2879 2896 b'old': old,
2880 2897 b'new': new,
2881 2898 },
2882 2899 ).result()
2883 2900
2884 2901 ui.status(pycompat.bytestr(r) + b'\n')
2885 2902 return not r
2886 2903 else:
2887 2904 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2888 2905 ui.write(
2889 2906 b"%s\t%s\n"
2890 2907 % (stringutil.escapestr(k), stringutil.escapestr(v))
2891 2908 )
2892 2909 finally:
2893 2910 target.close()
2894 2911
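# Illustrative session for debugpushkey above, listing the 'bookmarks'
# pushkey namespace (a sketch: the URL, bookmark name and node id are
# placeholders; each key and value pair is tab-separated):
#
#   $ hg debugpushkey http://example.com/repo bookmarks
#   main    1111111111222222222233333333334444444444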
2895 2912
2896 2913 @command(b'debugpvec', [], _(b'A B'))
2897 2914 def debugpvec(ui, repo, a, b=None):
2898 2915 ca = scmutil.revsingle(repo, a)
2899 2916 cb = scmutil.revsingle(repo, b)
2900 2917 pa = pvec.ctxpvec(ca)
2901 2918 pb = pvec.ctxpvec(cb)
2902 2919 if pa == pb:
2903 2920 rel = b"="
2904 2921 elif pa > pb:
2905 2922 rel = b">"
2906 2923 elif pa < pb:
2907 2924 rel = b"<"
2908 2925 else: # pa | pb: incomparable (also ensures rel is always bound)
2909 2926 rel = b"|"
2910 2927 ui.write(_(b"a: %s\n") % pa)
2911 2928 ui.write(_(b"b: %s\n") % pb)
2912 2929 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2913 2930 ui.write(
2914 2931 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2915 2932 % (
2916 2933 abs(pa._depth - pb._depth),
2917 2934 pvec._hamming(pa._vec, pb._vec),
2918 2935 pa.distance(pb),
2919 2936 rel,
2920 2937 )
2921 2938 )
2922 2939
2923 2940
2924 2941 @command(
2925 2942 b'debugrebuilddirstate|debugrebuildstate',
2926 2943 [
2927 2944 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2928 2945 (
2929 2946 b'',
2930 2947 b'minimal',
2931 2948 None,
2932 2949 _(
2933 2950 b'only rebuild files that are inconsistent with '
2934 2951 b'the working copy parent'
2935 2952 ),
2936 2953 ),
2937 2954 ],
2938 2955 _(b'[-r REV]'),
2939 2956 )
2940 2957 def debugrebuilddirstate(ui, repo, rev, **opts):
2941 2958 """rebuild the dirstate as it would look like for the given revision
2942 2959
2943 2960 If no revision is specified, the first parent of the working directory is used.
2944 2961
2945 2962 The dirstate will be set to the files of the given revision.
2946 2963 The actual working directory content or existing dirstate
2947 2964 information such as adds or removes is not considered.
2948 2965
2949 2966 ``minimal`` will only rebuild the dirstate status for files that claim to be
2950 2967 tracked but are not in the parent manifest, or that exist in the parent
2951 2968 manifest but are not in the dirstate. It will not change adds, removes, or
2952 2969 modified files that are in the working copy parent.
2953 2970
2954 2971 One use of this command is to make the next :hg:`status` invocation
2955 2972 check the actual file content.
2956 2973 """
2957 2974 ctx = scmutil.revsingle(repo, rev)
2958 2975 with repo.wlock():
2959 2976 dirstate = repo.dirstate
2960 2977 changedfiles = None
2961 2978 # See command doc for what minimal does.
2962 2979 if opts.get('minimal'):
2963 2980 manifestfiles = set(ctx.manifest().keys())
2964 2981 dirstatefiles = set(dirstate)
2965 2982 manifestonly = manifestfiles - dirstatefiles
2966 2983 dsonly = dirstatefiles - manifestfiles
2967 2984 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2968 2985 changedfiles = manifestonly | dsnotadded
2969 2986
2970 2987 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2971 2988
2972 2989
2973 2990 @command(b'debugrebuildfncache', [], b'')
2974 2991 def debugrebuildfncache(ui, repo):
2975 2992 """rebuild the fncache file"""
2976 2993 repair.rebuildfncache(ui, repo)
2977 2994
2978 2995
2979 2996 @command(
2980 2997 b'debugrename',
2981 2998 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2982 2999 _(b'[-r REV] [FILE]...'),
2983 3000 )
2984 3001 def debugrename(ui, repo, *pats, **opts):
2985 3002 """dump rename information"""
2986 3003
2987 3004 opts = pycompat.byteskwargs(opts)
2988 3005 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2989 3006 m = scmutil.match(ctx, pats, opts)
2990 3007 for abs in ctx.walk(m):
2991 3008 fctx = ctx[abs]
2992 3009 o = fctx.filelog().renamed(fctx.filenode())
2993 3010 rel = repo.pathto(abs)
2994 3011 if o:
2995 3012 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2996 3013 else:
2997 3014 ui.write(_(b"%s not renamed\n") % rel)
2998 3015
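# Illustrative output for debugrename above (a sketch: file names and
# the source filelog node are invented):
#
#   $ hg debugrename new-name.txt README
#   new-name.txt renamed from old-name.txt:9117c6561b0bbd712f6eca2b46f3b7d38b3f4cfa
#   README not renamed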
2999 3016
3000 3017 @command(b'debugrequires|debugrequirements', [], b'')
3001 3018 def debugrequirements(ui, repo):
3002 3019 """print the current repo requirements"""
3003 3020 for r in sorted(repo.requirements):
3004 3021 ui.write(b"%s\n" % r)
3005 3022
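# Illustrative output for debugrequirements above (a sketch: the exact
# set of requirements depends on the Mercurial version and the options
# the repository was created with):
#
#   $ hg debugrequires
#   dotencode
#   fncache
#   generaldelta
#   revlogv1
#   sparserevlog
#   store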
3006 3023
3007 3024 @command(
3008 3025 b'debugrevlog',
3009 3026 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
3010 3027 _(b'-c|-m|FILE'),
3011 3028 optionalrepo=True,
3012 3029 )
3013 3030 def debugrevlog(ui, repo, file_=None, **opts):
3014 3031 """show data and statistics about a revlog"""
3015 3032 opts = pycompat.byteskwargs(opts)
3016 3033 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
3017 3034
3018 3035 if opts.get(b"dump"):
3019 3036 numrevs = len(r)
3020 3037 ui.write(
3021 3038 (
3022 3039 b"# rev p1rev p2rev start end deltastart base p1 p2"
3023 3040 b" rawsize totalsize compression heads chainlen\n"
3024 3041 )
3025 3042 )
3026 3043 ts = 0
3027 3044 heads = set()
3028 3045
3029 3046 for rev in pycompat.xrange(numrevs):
3030 3047 dbase = r.deltaparent(rev)
3031 3048 if dbase == -1:
3032 3049 dbase = rev
3033 3050 cbase = r.chainbase(rev)
3034 3051 clen = r.chainlen(rev)
3035 3052 p1, p2 = r.parentrevs(rev)
3036 3053 rs = r.rawsize(rev)
3037 3054 ts = ts + rs
3038 3055 heads -= set(r.parentrevs(rev))
3039 3056 heads.add(rev)
3040 3057 try:
3041 3058 compression = ts / r.end(rev)
3042 3059 except ZeroDivisionError:
3043 3060 compression = 0
3044 3061 ui.write(
3045 3062 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
3046 3063 b"%11d %5d %8d\n"
3047 3064 % (
3048 3065 rev,
3049 3066 p1,
3050 3067 p2,
3051 3068 r.start(rev),
3052 3069 r.end(rev),
3053 3070 r.start(dbase),
3054 3071 r.start(cbase),
3055 3072 r.start(p1),
3056 3073 r.start(p2),
3057 3074 rs,
3058 3075 ts,
3059 3076 compression,
3060 3077 len(heads),
3061 3078 clen,
3062 3079 )
3063 3080 )
3064 3081 return 0
3065 3082
3066 3083 format = r._format_version
3067 3084 v = r._format_flags
3068 3085 flags = []
3069 3086 gdelta = False
3070 3087 if v & revlog.FLAG_INLINE_DATA:
3071 3088 flags.append(b'inline')
3072 3089 if v & revlog.FLAG_GENERALDELTA:
3073 3090 gdelta = True
3074 3091 flags.append(b'generaldelta')
3075 3092 if not flags:
3076 3093 flags = [b'(none)']
3077 3094
3078 3095 ### tracks merge vs single parent
3079 3096 nummerges = 0
3080 3097
3081 3098 ### tracks the ways deltas are built
3082 3099 # nodelta
3083 3100 numempty = 0
3084 3101 numemptytext = 0
3085 3102 numemptydelta = 0
3086 3103 # full file content
3087 3104 numfull = 0
3088 3105 # intermediate snapshot against a prior snapshot
3089 3106 numsemi = 0
3090 3107 # snapshot count per depth
3091 3108 numsnapdepth = collections.defaultdict(lambda: 0)
3092 3109 # delta against previous revision
3093 3110 numprev = 0
3094 3111 # delta against first or second parent (not prev)
3095 3112 nump1 = 0
3096 3113 nump2 = 0
3097 3114 # delta against neither prev nor parents
3098 3115 numother = 0
3099 3116 # delta against prev that are also first or second parent
3100 3117 # (details of `numprev`)
3101 3118 nump1prev = 0
3102 3119 nump2prev = 0
3103 3120
3104 3121 # data about delta chain of each revs
3105 3122 chainlengths = []
3106 3123 chainbases = []
3107 3124 chainspans = []
3108 3125
3109 3126 # data about each revision
3110 3127 datasize = [None, 0, 0]
3111 3128 fullsize = [None, 0, 0]
3112 3129 semisize = [None, 0, 0]
3113 3130 # snapshot count per depth
3114 3131 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
3115 3132 deltasize = [None, 0, 0]
3116 3133 chunktypecounts = {}
3117 3134 chunktypesizes = {}
3118 3135
3119 3136 def addsize(size, l):
3120 3137 if l[0] is None or size < l[0]:
3121 3138 l[0] = size
3122 3139 if size > l[1]:
3123 3140 l[1] = size
3124 3141 l[2] += size
3125 3142
3126 3143 numrevs = len(r)
3127 3144 for rev in pycompat.xrange(numrevs):
3128 3145 p1, p2 = r.parentrevs(rev)
3129 3146 delta = r.deltaparent(rev)
3130 3147 if format > 0:
3131 3148 addsize(r.rawsize(rev), datasize)
3132 3149 if p2 != nullrev:
3133 3150 nummerges += 1
3134 3151 size = r.length(rev)
3135 3152 if delta == nullrev:
3136 3153 chainlengths.append(0)
3137 3154 chainbases.append(r.start(rev))
3138 3155 chainspans.append(size)
3139 3156 if size == 0:
3140 3157 numempty += 1
3141 3158 numemptytext += 1
3142 3159 else:
3143 3160 numfull += 1
3144 3161 numsnapdepth[0] += 1
3145 3162 addsize(size, fullsize)
3146 3163 addsize(size, snapsizedepth[0])
3147 3164 else:
3148 3165 chainlengths.append(chainlengths[delta] + 1)
3149 3166 baseaddr = chainbases[delta]
3150 3167 revaddr = r.start(rev)
3151 3168 chainbases.append(baseaddr)
3152 3169 chainspans.append((revaddr - baseaddr) + size)
3153 3170 if size == 0:
3154 3171 numempty += 1
3155 3172 numemptydelta += 1
3156 3173 elif r.issnapshot(rev):
3157 3174 addsize(size, semisize)
3158 3175 numsemi += 1
3159 3176 depth = r.snapshotdepth(rev)
3160 3177 numsnapdepth[depth] += 1
3161 3178 addsize(size, snapsizedepth[depth])
3162 3179 else:
3163 3180 addsize(size, deltasize)
3164 3181 if delta == rev - 1:
3165 3182 numprev += 1
3166 3183 if delta == p1:
3167 3184 nump1prev += 1
3168 3185 elif delta == p2:
3169 3186 nump2prev += 1
3170 3187 elif delta == p1:
3171 3188 nump1 += 1
3172 3189 elif delta == p2:
3173 3190 nump2 += 1
3174 3191 elif delta != nullrev:
3175 3192 numother += 1
3176 3193
3177 3194 # Obtain data on the raw chunks in the revlog.
3178 3195 if util.safehasattr(r, b'_getsegmentforrevs'):
3179 3196 segment = r._getsegmentforrevs(rev, rev)[1]
3180 3197 else:
3181 3198 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3182 3199 if segment:
3183 3200 chunktype = bytes(segment[0:1])
3184 3201 else:
3185 3202 chunktype = b'empty'
3186 3203
3187 3204 if chunktype not in chunktypecounts:
3188 3205 chunktypecounts[chunktype] = 0
3189 3206 chunktypesizes[chunktype] = 0
3190 3207
3191 3208 chunktypecounts[chunktype] += 1
3192 3209 chunktypesizes[chunktype] += size
3193 3210
3194 3211 # Adjust size min value for empty cases
3195 3212 for size in (datasize, fullsize, semisize, deltasize):
3196 3213 if size[0] is None:
3197 3214 size[0] = 0
3198 3215
3199 3216 numdeltas = numrevs - numfull - numempty - numsemi
3200 3217 numoprev = numprev - nump1prev - nump2prev
3201 3218 totalrawsize = datasize[2]
3202 3219 datasize[2] /= numrevs
3203 3220 fulltotal = fullsize[2]
3204 3221 if numfull == 0:
3205 3222 fullsize[2] = 0
3206 3223 else:
3207 3224 fullsize[2] /= numfull
3208 3225 semitotal = semisize[2]
3209 3226 snaptotal = {}
3210 3227 if numsemi > 0:
3211 3228 semisize[2] /= numsemi
3212 3229 for depth in snapsizedepth:
3213 3230 snaptotal[depth] = snapsizedepth[depth][2]
3214 3231 snapsizedepth[depth][2] /= numsnapdepth[depth]
3215 3232
3216 3233 deltatotal = deltasize[2]
3217 3234 if numdeltas > 0:
3218 3235 deltasize[2] /= numdeltas
3219 3236 totalsize = fulltotal + semitotal + deltatotal
3220 3237 avgchainlen = sum(chainlengths) / numrevs
3221 3238 maxchainlen = max(chainlengths)
3222 3239 maxchainspan = max(chainspans)
3223 3240 compratio = 1
3224 3241 if totalsize:
3225 3242 compratio = totalrawsize / totalsize
3226 3243
3227 3244 basedfmtstr = b'%%%dd\n'
3228 3245 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3229 3246
3230 3247 def dfmtstr(max):
3231 3248 return basedfmtstr % len(str(max))
3232 3249
3233 3250 def pcfmtstr(max, padding=0):
3234 3251 return basepcfmtstr % (len(str(max)), b' ' * padding)
3235 3252
3236 3253 def pcfmt(value, total):
3237 3254 if total:
3238 3255 return (value, 100 * float(value) / total)
3239 3256 else:
3240 3257 return value, 100.0
3241 3258
3242 3259 ui.writenoi18n(b'format : %d\n' % format)
3243 3260 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3244 3261
3245 3262 ui.write(b'\n')
3246 3263 fmt = pcfmtstr(totalsize)
3247 3264 fmt2 = dfmtstr(totalsize)
3248 3265 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3249 3266 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3250 3267 ui.writenoi18n(
3251 3268 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3252 3269 )
3253 3270 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3254 3271 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3255 3272 ui.writenoi18n(
3256 3273 b' text : '
3257 3274 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3258 3275 )
3259 3276 ui.writenoi18n(
3260 3277 b' delta : '
3261 3278 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3262 3279 )
3263 3280 ui.writenoi18n(
3264 3281 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3265 3282 )
3266 3283 for depth in sorted(numsnapdepth):
3267 3284 ui.write(
3268 3285 (b' lvl-%-3d : ' % depth)
3269 3286 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3270 3287 )
3271 3288 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3272 3289 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3273 3290 ui.writenoi18n(
3274 3291 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3275 3292 )
3276 3293 for depth in sorted(numsnapdepth):
3277 3294 ui.write(
3278 3295 (b' lvl-%-3d : ' % depth)
3279 3296 + fmt % pcfmt(snaptotal[depth], totalsize)
3280 3297 )
3281 3298 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3282 3299
3283 3300 def fmtchunktype(chunktype):
3284 3301 if chunktype == b'empty':
3285 3302 return b' %s : ' % chunktype
3286 3303 elif chunktype in pycompat.bytestr(string.ascii_letters):
3287 3304 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3288 3305 else:
3289 3306 return b' 0x%s : ' % hex(chunktype)
3290 3307
3291 3308 ui.write(b'\n')
3292 3309 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3293 3310 for chunktype in sorted(chunktypecounts):
3294 3311 ui.write(fmtchunktype(chunktype))
3295 3312 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3296 3313 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3297 3314 for chunktype in sorted(chunktypecounts):
3298 3315 ui.write(fmtchunktype(chunktype))
3299 3316 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3300 3317
3301 3318 ui.write(b'\n')
3302 3319 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3303 3320 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3304 3321 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3305 3322 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3306 3323 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3307 3324
3308 3325 if format > 0:
3309 3326 ui.write(b'\n')
3310 3327 ui.writenoi18n(
3311 3328 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3312 3329 % tuple(datasize)
3313 3330 )
3314 3331 ui.writenoi18n(
3315 3332 b'full revision size (min/max/avg) : %d / %d / %d\n'
3316 3333 % tuple(fullsize)
3317 3334 )
3318 3335 ui.writenoi18n(
3319 3336 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3320 3337 % tuple(semisize)
3321 3338 )
3322 3339 for depth in sorted(snapsizedepth):
3323 3340 if depth == 0:
3324 3341 continue
3325 3342 ui.writenoi18n(
3326 3343 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3327 3344 % ((depth,) + tuple(snapsizedepth[depth]))
3328 3345 )
3329 3346 ui.writenoi18n(
3330 3347 b'delta size (min/max/avg) : %d / %d / %d\n'
3331 3348 % tuple(deltasize)
3332 3349 )
3333 3350
3334 3351 if numdeltas > 0:
3335 3352 ui.write(b'\n')
3336 3353 fmt = pcfmtstr(numdeltas)
3337 3354 fmt2 = pcfmtstr(numdeltas, 4)
3338 3355 ui.writenoi18n(
3339 3356 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3340 3357 )
3341 3358 if numprev > 0:
3342 3359 ui.writenoi18n(
3343 3360 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3344 3361 )
3345 3362 ui.writenoi18n(
3346 3363 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3347 3364 )
3348 3365 ui.writenoi18n(
3349 3366 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3350 3367 )
3351 3368 if gdelta:
3352 3369 ui.writenoi18n(
3353 3370 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3354 3371 )
3355 3372 ui.writenoi18n(
3356 3373 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3357 3374 )
3358 3375 ui.writenoi18n(
3359 3376 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3360 3377 )
3361 3378
3362 3379
3363 3380 @command(
3364 3381 b'debugrevlogindex',
3365 3382 cmdutil.debugrevlogopts
3366 3383 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3367 3384 _(b'[-f FORMAT] -c|-m|FILE'),
3368 3385 optionalrepo=True,
3369 3386 )
3370 3387 def debugrevlogindex(ui, repo, file_=None, **opts):
3371 3388 """dump the contents of a revlog index"""
3372 3389 opts = pycompat.byteskwargs(opts)
3373 3390 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3374 3391 format = opts.get(b'format', 0)
3375 3392 if format not in (0, 1):
3376 3393 raise error.Abort(_(b"unknown format %d") % format)
3377 3394
3378 3395 if ui.debugflag:
3379 3396 shortfn = hex
3380 3397 else:
3381 3398 shortfn = short
3382 3399
3383 3400 # There might not be anything in r, so have a sane default
3384 3401 idlen = 12
3385 3402 for i in r:
3386 3403 idlen = len(shortfn(r.node(i)))
3387 3404 break
3388 3405
3389 3406 if format == 0:
3390 3407 if ui.verbose:
3391 3408 ui.writenoi18n(
3392 3409 b" rev offset length linkrev %s %s p2\n"
3393 3410 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3394 3411 )
3395 3412 else:
3396 3413 ui.writenoi18n(
3397 3414 b" rev linkrev %s %s p2\n"
3398 3415 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3399 3416 )
3400 3417 elif format == 1:
3401 3418 if ui.verbose:
3402 3419 ui.writenoi18n(
3403 3420 (
3404 3421 b" rev flag offset length size link p1"
3405 3422 b" p2 %s\n"
3406 3423 )
3407 3424 % b"nodeid".rjust(idlen)
3408 3425 )
3409 3426 else:
3410 3427 ui.writenoi18n(
3411 3428 b" rev flag size link p1 p2 %s\n"
3412 3429 % b"nodeid".rjust(idlen)
3413 3430 )
3414 3431
3415 3432 for i in r:
3416 3433 node = r.node(i)
3417 3434 if format == 0:
3418 3435 try:
3419 3436 pp = r.parents(node)
3420 3437 except Exception:
3421 3438 pp = [repo.nullid, repo.nullid]
3422 3439 if ui.verbose:
3423 3440 ui.write(
3424 3441 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3425 3442 % (
3426 3443 i,
3427 3444 r.start(i),
3428 3445 r.length(i),
3429 3446 r.linkrev(i),
3430 3447 shortfn(node),
3431 3448 shortfn(pp[0]),
3432 3449 shortfn(pp[1]),
3433 3450 )
3434 3451 )
3435 3452 else:
3436 3453 ui.write(
3437 3454 b"% 6d % 7d %s %s %s\n"
3438 3455 % (
3439 3456 i,
3440 3457 r.linkrev(i),
3441 3458 shortfn(node),
3442 3459 shortfn(pp[0]),
3443 3460 shortfn(pp[1]),
3444 3461 )
3445 3462 )
3446 3463 elif format == 1:
3447 3464 pr = r.parentrevs(i)
3448 3465 if ui.verbose:
3449 3466 ui.write(
3450 3467 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3451 3468 % (
3452 3469 i,
3453 3470 r.flags(i),
3454 3471 r.start(i),
3455 3472 r.length(i),
3456 3473 r.rawsize(i),
3457 3474 r.linkrev(i),
3458 3475 pr[0],
3459 3476 pr[1],
3460 3477 shortfn(node),
3461 3478 )
3462 3479 )
3463 3480 else:
3464 3481 ui.write(
3465 3482 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3466 3483 % (
3467 3484 i,
3468 3485 r.flags(i),
3469 3486 r.rawsize(i),
3470 3487 r.linkrev(i),
3471 3488 pr[0],
3472 3489 pr[1],
3473 3490 shortfn(node),
3474 3491 )
3475 3492 )
3476 3493
3477 3494
3478 3495 @command(
3479 3496 b'debugrevspec',
3480 3497 [
3481 3498 (
3482 3499 b'',
3483 3500 b'optimize',
3484 3501 None,
3485 3502 _(b'print parsed tree after optimizing (DEPRECATED)'),
3486 3503 ),
3487 3504 (
3488 3505 b'',
3489 3506 b'show-revs',
3490 3507 True,
3491 3508 _(b'print list of result revisions (default)'),
3492 3509 ),
3493 3510 (
3494 3511 b's',
3495 3512 b'show-set',
3496 3513 None,
3497 3514 _(b'print internal representation of result set'),
3498 3515 ),
3499 3516 (
3500 3517 b'p',
3501 3518 b'show-stage',
3502 3519 [],
3503 3520 _(b'print parsed tree at the given stage'),
3504 3521 _(b'NAME'),
3505 3522 ),
3506 3523 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3507 3524 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3508 3525 ],
3509 3526 b'REVSPEC',
3510 3527 )
3511 3528 def debugrevspec(ui, repo, expr, **opts):
3512 3529 """parse and apply a revision specification
3513 3530
3514 3531 Use -p/--show-stage option to print the parsed tree at the given stages.
3515 3532 Use -p all to print tree at every stage.
3516 3533
3517 3534 Use --no-show-revs option with -s or -p to print only the set
3518 3535 representation or the parsed tree respectively.
3519 3536
3520 3537 Use --verify-optimized to compare the optimized result with the unoptimized
3521 3538 one. Returns 1 if the optimized result differs.
3522 3539 """
3523 3540 opts = pycompat.byteskwargs(opts)
3524 3541 aliases = ui.configitems(b'revsetalias')
3525 3542 stages = [
3526 3543 (b'parsed', lambda tree: tree),
3527 3544 (
3528 3545 b'expanded',
3529 3546 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3530 3547 ),
3531 3548 (b'concatenated', revsetlang.foldconcat),
3532 3549 (b'analyzed', revsetlang.analyze),
3533 3550 (b'optimized', revsetlang.optimize),
3534 3551 ]
3535 3552 if opts[b'no_optimized']:
3536 3553 stages = stages[:-1]
3537 3554 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3538 3555 raise error.Abort(
3539 3556 _(b'cannot use --verify-optimized with --no-optimized')
3540 3557 )
3541 3558 stagenames = {n for n, f in stages}
3542 3559
3543 3560 showalways = set()
3544 3561 showchanged = set()
3545 3562 if ui.verbose and not opts[b'show_stage']:
3546 3563 # show parsed tree by --verbose (deprecated)
3547 3564 showalways.add(b'parsed')
3548 3565 showchanged.update([b'expanded', b'concatenated'])
3549 3566 if opts[b'optimize']:
3550 3567 showalways.add(b'optimized')
3551 3568 if opts[b'show_stage'] and opts[b'optimize']:
3552 3569 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3553 3570 if opts[b'show_stage'] == [b'all']:
3554 3571 showalways.update(stagenames)
3555 3572 else:
3556 3573 for n in opts[b'show_stage']:
3557 3574 if n not in stagenames:
3558 3575 raise error.Abort(_(b'invalid stage name: %s') % n)
3559 3576 showalways.update(opts[b'show_stage'])
3560 3577
3561 3578 treebystage = {}
3562 3579 printedtree = None
3563 3580 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3564 3581 for n, f in stages:
3565 3582 treebystage[n] = tree = f(tree)
3566 3583 if n in showalways or (n in showchanged and tree != printedtree):
3567 3584 if opts[b'show_stage'] or n != b'parsed':
3568 3585 ui.write(b"* %s:\n" % n)
3569 3586 ui.write(revsetlang.prettyformat(tree), b"\n")
3570 3587 printedtree = tree
3571 3588
3572 3589 if opts[b'verify_optimized']:
3573 3590 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3574 3591 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3575 3592 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3576 3593 ui.writenoi18n(
3577 3594 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3578 3595 )
3579 3596 ui.writenoi18n(
3580 3597 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3581 3598 )
3582 3599 arevs = list(arevs)
3583 3600 brevs = list(brevs)
3584 3601 if arevs == brevs:
3585 3602 return 0
3586 3603 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3587 3604 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3588 3605 sm = difflib.SequenceMatcher(None, arevs, brevs)
3589 3606 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3590 3607 if tag in ('delete', 'replace'):
3591 3608 for c in arevs[alo:ahi]:
3592 3609 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3593 3610 if tag in ('insert', 'replace'):
3594 3611 for c in brevs[blo:bhi]:
3595 3612 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3596 3613 if tag == 'equal':
3597 3614 for c in arevs[alo:ahi]:
3598 3615 ui.write(b' %d\n' % c)
3599 3616 return 1
3600 3617
3601 3618 func = revset.makematcher(tree)
3602 3619 revs = func(repo)
3603 3620 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3604 3621 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3605 3622 if not opts[b'show_revs']:
3606 3623 return
3607 3624 for c in revs:
3608 3625 ui.write(b"%d\n" % c)
3609 3626
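# A minimal, illustrative sketch of driving the same stage pipeline from
# Python, using only calls that already appear in debugrevspec above
# (revsetlang.parse/analyze/optimize and revset.makematcher); the revset
# expression is arbitrary and `repo`/`ui` are assumed to be in scope:
#
#     tree = revsetlang.parse(b'heads(default)')
#     tree = revsetlang.analyze(tree)
#     tree = revsetlang.optimize(tree)
#     for rev in revset.makematcher(tree)(repo):
#         ui.write(b'%d\n' % rev)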
3610 3627
3611 3628 @command(
3612 3629 b'debugserve',
3613 3630 [
3614 3631 (
3615 3632 b'',
3616 3633 b'sshstdio',
3617 3634 False,
3618 3635 _(b'run an SSH server bound to process handles'),
3619 3636 ),
3620 3637 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3621 3638 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3622 3639 ],
3623 3640 b'',
3624 3641 )
3625 3642 def debugserve(ui, repo, **opts):
3626 3643 """run a server with advanced settings
3627 3644
3628 3645 This command is similar to :hg:`serve`. It exists partially as a
3629 3646 workaround for the fact that ``hg serve --stdio`` must have specific
3630 3647 arguments for security reasons.
3631 3648 """
3632 3649 opts = pycompat.byteskwargs(opts)
3633 3650
3634 3651 if not opts[b'sshstdio']:
3635 3652 raise error.Abort(_(b'only --sshstdio is currently supported'))
3636 3653
3637 3654 logfh = None
3638 3655
3639 3656 if opts[b'logiofd'] and opts[b'logiofile']:
3640 3657 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3641 3658
3642 3659 if opts[b'logiofd']:
3643 3660 # Ideally we would be line buffered. But line buffering in binary
3644 3661 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3645 3662 # buffering could have performance impacts. But since this isn't
3646 3663 # performance critical code, it should be fine.
3647 3664 try:
3648 3665 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3649 3666 except OSError as e:
3650 3667 if e.errno != errno.ESPIPE:
3651 3668 raise
3652 3669 # can't seek a pipe, so `ab` mode fails on py3
3653 3670 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3654 3671 elif opts[b'logiofile']:
3655 3672 logfh = open(opts[b'logiofile'], b'ab', 0)
3656 3673
3657 3674 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3658 3675 s.serve_forever()
3659 3676
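# Illustrative usage (the log path is hypothetical): serve the current
# repository over stdio while logging all wire I/O for later inspection:
#
#     $ hg -R /path/to/repo debugserve --sshstdio --logiofile /tmp/hg-io.log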
3660 3677
3661 3678 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3662 3679 def debugsetparents(ui, repo, rev1, rev2=None):
3663 3680 """manually set the parents of the current working directory (DANGEROUS)
3664 3681
3665 3682 This command is not what you are looking for and should not be used. Using
3666 3683 this command will almost certainly result in subtle corruption of the
3667 3684 file-level histories within your repository. DO NOT USE THIS COMMAND.
3668 3685 
3669 3686 The command updates the p1 and p2 fields in the dirstate and touches
3670 3687 nothing else. This is useful for writing repository conversion tools, but
3671 3688 it should be used with extreme care. For example, neither the working
3672 3689 directory contents nor the rest of the dirstate is updated, so file status
3673 3690 may be incorrect after running this command. Only use it if you are one of
3674 3691 the few people who deeply understand both conversion tools and file-level
3675 3692 histories. If you are reading this help, you are not one of those people
3676 3693 (most of them sailed west from Mithlond anyway).
3677 3694 
3678 3695 So, one last time: DO NOT USE THIS COMMAND.
3679 3696
3680 3697 Returns 0 on success.
3681 3698 """
3682 3699
3683 3700 node1 = scmutil.revsingle(repo, rev1).node()
3684 3701 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3685 3702
3686 3703 with repo.wlock():
3687 3704 repo.setparents(node1, node2)
3688 3705
3689 3706
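# A minimal sketch of the equivalent API usage for a conversion tool (the
# revision identifier is hypothetical; this mirrors the command body above
# and inherits every warning in its docstring):
#
#     node1 = scmutil.revsingle(repo, b'abcdef012345').node()
#     with repo.wlock():
#         repo.setparents(node1, repo.nullid)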
3690 3707 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3691 3708 def debugsidedata(ui, repo, file_, rev=None, **opts):
3692 3709 """dump the side data for a cl/manifest/file revision
3693 3710
3694 3711 Use --verbose to dump the sidedata content."""
3695 3712 opts = pycompat.byteskwargs(opts)
3696 3713 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3697 3714 if rev is not None:
3698 3715 raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3699 3716 file_, rev = None, file_
3700 3717 elif rev is None:
3701 3718 raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
3702 3719 r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
3703 3720 r = getattr(r, '_revlog', r)
3704 3721 try:
3705 3722 sidedata = r.sidedata(r.lookup(rev))
3706 3723 except KeyError:
3707 3724 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3708 3725 if sidedata:
3709 3726 sidedata = list(sidedata.items())
3710 3727 sidedata.sort()
3711 3728 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3712 3729 for key, value in sidedata:
3713 3730 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3714 3731 if ui.verbose:
3715 3732 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3716 3733
3717 3734
3718 3735 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3719 3736 def debugssl(ui, repo, source=None, **opts):
3720 3737 """test a secure connection to a server
3721 3738
3722 3739 This builds the certificate chain for the server on Windows, installing the
3723 3740 missing intermediates and trusted root via Windows Update if necessary. It
3724 3741 does nothing on other platforms.
3725 3742
3726 3743 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3727 3744 that server is used. See :hg:`help urls` for more information.
3728 3745
3729 3746 If the update succeeds, retry the original operation. Otherwise, the cause
3730 3747 of the SSL error is likely another issue.
3731 3748 """
3732 3749 if not pycompat.iswindows:
3733 3750 raise error.Abort(
3734 3751 _(b'certificate chain building is only possible on Windows')
3735 3752 )
3736 3753
3737 3754 if not source:
3738 3755 if not repo:
3739 3756 raise error.Abort(
3740 3757 _(
3741 3758 b"there is no Mercurial repository here, and no "
3742 3759 b"server specified"
3743 3760 )
3744 3761 )
3745 3762 source = b"default"
3746 3763
3747 3764 source, branches = urlutil.get_unique_pull_path(
3748 3765 b'debugssl', repo, ui, source
3749 3766 )
3750 3767 url = urlutil.url(source)
3751 3768
3752 3769 defaultport = {b'https': 443, b'ssh': 22}
3753 3770 if url.scheme in defaultport:
3754 3771 try:
3755 3772 addr = (url.host, int(url.port or defaultport[url.scheme]))
3756 3773 except ValueError:
3757 3774 raise error.Abort(_(b"malformed port number in URL"))
3758 3775 else:
3759 3776 raise error.Abort(_(b"only https and ssh connections are supported"))
3760 3777
3761 3778 from . import win32
3762 3779
3763 3780 s = ssl.wrap_socket(
3764 3781 socket.socket(),
3765 3782 ssl_version=ssl.PROTOCOL_TLS,
3766 3783 cert_reqs=ssl.CERT_NONE,
3767 3784 ca_certs=None,
3768 3785 )
3769 3786
3770 3787 try:
3771 3788 s.connect(addr)
3772 3789 cert = s.getpeercert(True)
3773 3790
3774 3791 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3775 3792
3776 3793 complete = win32.checkcertificatechain(cert, build=False)
3777 3794
3778 3795 if not complete:
3779 3796 ui.status(_(b'certificate chain is incomplete, updating... '))
3780 3797
3781 3798 if not win32.checkcertificatechain(cert):
3782 3799 ui.status(_(b'failed.\n'))
3783 3800 else:
3784 3801 ui.status(_(b'done.\n'))
3785 3802 else:
3786 3803 ui.status(_(b'full certificate chain is available\n'))
3787 3804 finally:
3788 3805 s.close()
3789 3806
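# Illustrative invocation (hypothetical host). On Windows this connects,
# fetches the peer certificate, and asks Windows Update to complete the
# chain when intermediates are missing:
#
#     $ hg debugssl https://hg.example.com/repo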
3790 3807
3791 3808 @command(
3792 3809 b"debugbackupbundle",
3793 3810 [
3794 3811 (
3795 3812 b"",
3796 3813 b"recover",
3797 3814 b"",
3798 3815 b"brings the specified changeset back into the repository",
3799 3816 )
3800 3817 ]
3801 3818 + cmdutil.logopts,
3802 3819 _(b"hg debugbackupbundle [--recover HASH]"),
3803 3820 )
3804 3821 def debugbackupbundle(ui, repo, *pats, **opts):
3805 3822 """lists the changesets available in backup bundles
3806 3823
3807 3824 Without any arguments, this command prints a list of the changesets in each
3808 3825 backup bundle.
3809 3826
3810 3827 --recover takes a changeset hash and unbundles the first bundle that
3811 3828 contains that hash, which puts that changeset back in your repository.
3812 3829
3813 3830 --verbose will print the entire commit message and the bundle path for that
3814 3831 backup.
3815 3832 """
3816 3833 backups = list(
3817 3834 filter(
3818 3835 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3819 3836 )
3820 3837 )
3821 3838 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3822 3839
3823 3840 opts = pycompat.byteskwargs(opts)
3824 3841 opts[b"bundle"] = b""
3825 3842 opts[b"force"] = None
3826 3843 limit = logcmdutil.getlimit(opts)
3827 3844
3828 3845 def display(other, chlist, displayer):
3829 3846 if opts.get(b"newest_first"):
3830 3847 chlist.reverse()
3831 3848 count = 0
3832 3849 for n in chlist:
3833 3850 if limit is not None and count >= limit:
3834 3851 break
3835 3852 parents = [
3836 3853 True for p in other.changelog.parents(n) if p != repo.nullid
3837 3854 ]
3838 3855 if opts.get(b"no_merges") and len(parents) == 2:
3839 3856 continue
3840 3857 count += 1
3841 3858 displayer.show(other[n])
3842 3859
3843 3860 recovernode = opts.get(b"recover")
3844 3861 if recovernode:
3845 3862 if scmutil.isrevsymbol(repo, recovernode):
3846 3863 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3847 3864 return
3848 3865 elif backups:
3849 3866 msg = _(
3850 3867 b"Recover changesets using: hg debugbackupbundle --recover "
3851 3868 b"<changeset hash>\n\nAvailable backup changesets:"
3852 3869 )
3853 3870 ui.status(msg, label=b"status.removed")
3854 3871 else:
3855 3872 ui.status(_(b"no backup changesets found\n"))
3856 3873 return
3857 3874
3858 3875 for backup in backups:
3859 3876 # Much of this is copied from the hg incoming logic
3860 3877 source = os.path.relpath(backup, encoding.getcwd())
3861 3878 source, branches = urlutil.get_unique_pull_path(
3862 3879 b'debugbackupbundle',
3863 3880 repo,
3864 3881 ui,
3865 3882 source,
3866 3883 default_branches=opts.get(b'branch'),
3867 3884 )
3868 3885 try:
3869 3886 other = hg.peer(repo, opts, source)
3870 3887 except error.LookupError as ex:
3871 3888 msg = _(b"\nwarning: unable to open bundle %s") % source
3872 3889 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3873 3890 ui.warn(msg, hint=hint)
3874 3891 continue
3875 3892 revs, checkout = hg.addbranchrevs(
3876 3893 repo, other, branches, opts.get(b"rev")
3877 3894 )
3878 3895
3879 3896 if revs:
3880 3897 revs = [other.lookup(rev) for rev in revs]
3881 3898
3882 3899 with ui.silent():
3883 3900 try:
3884 3901 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3885 3902 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3886 3903 )
3887 3904 except error.LookupError:
3888 3905 continue
3889 3906
3890 3907 try:
3891 3908 if not chlist:
3892 3909 continue
3893 3910 if recovernode:
3894 3911 with repo.lock(), repo.transaction(b"unbundle") as tr:
3895 3912 if scmutil.isrevsymbol(other, recovernode):
3896 3913 ui.status(_(b"Unbundling %s\n") % (recovernode))
3897 3914 f = hg.openpath(ui, source)
3898 3915 gen = exchange.readbundle(ui, f, source)
3899 3916 if isinstance(gen, bundle2.unbundle20):
3900 3917 bundle2.applybundle(
3901 3918 repo,
3902 3919 gen,
3903 3920 tr,
3904 3921 source=b"unbundle",
3905 3922 url=b"bundle:" + source,
3906 3923 )
3907 3924 else:
3908 3925 gen.apply(repo, b"unbundle", b"bundle:" + source)
3909 3926 break
3910 3927 else:
3911 3928 backupdate = encoding.strtolocal(
3912 3929 time.strftime(
3913 3930 "%a %H:%M, %Y-%m-%d",
3914 3931 time.localtime(os.path.getmtime(source)),
3915 3932 )
3916 3933 )
3917 3934 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3918 3935 if ui.verbose:
3919 3936 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3920 3937 else:
3921 3938 opts[
3922 3939 b"template"
3923 3940 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3924 3941 displayer = logcmdutil.changesetdisplayer(
3925 3942 ui, other, opts, False
3926 3943 )
3927 3944 display(other, chlist, displayer)
3928 3945 displayer.close()
3929 3946 finally:
3930 3947 cleanupfn()
3931 3948
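# Illustrative usage (the hash is hypothetical): list the changesets held
# in strip backup bundles, then restore one of them:
#
#     $ hg debugbackupbundle
#     $ hg debugbackupbundle --recover 1234abcd5678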
3932 3949
3933 3950 @command(
3934 3951 b'debugsub',
3935 3952 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3936 3953 _(b'[-r REV] [REV]'),
3937 3954 )
3938 3955 def debugsub(ui, repo, rev=None):
3939 3956 ctx = scmutil.revsingle(repo, rev, None)
3940 3957 for k, v in sorted(ctx.substate.items()):
3941 3958 ui.writenoi18n(b'path %s\n' % k)
3942 3959 ui.writenoi18n(b' source %s\n' % v[0])
3943 3960 ui.writenoi18n(b' revision %s\n' % v[1])
3944 3961
3945 3962
3946 3963 @command(b'debugshell', optionalrepo=True)
3947 3964 def debugshell(ui, repo):
3948 3965 """run an interactive Python interpreter
3949 3966
3950 3967 The local namespace is provided with a reference to the ui and
3951 3968 the repo instance (if available).
3952 3969 """
3953 3970 import code
3954 3971
3955 3972 imported_objects = {
3956 3973 'ui': ui,
3957 3974 'repo': repo,
3958 3975 }
3959 3976
3960 3977 code.interact(local=imported_objects)
3961 3978
3962 3979
3963 3980 @command(
3964 3981 b'debugsuccessorssets',
3965 3982 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3966 3983 _(b'[REV]'),
3967 3984 )
3968 3985 def debugsuccessorssets(ui, repo, *revs, **opts):
3969 3986 """show set of successors for revision
3970 3987
3971 3988 A successors set of changeset A is a consistent group of revisions that
3972 3989 succeed A. It contains non-obsolete changesets only, unless the closest
3973 3990 successors sets are requested (see the --closest option).
3974 3991
3975 3992 In most cases a changeset A has a single successors set containing a single
3976 3993 successor (changeset A replaced by A').
3977 3994
3978 3995 A changeset that is made obsolete with no successors is called "pruned".
3979 3996 Such changesets have no successors sets at all.
3980 3997
3981 3998 A changeset that has been "split" will have a successors set containing
3982 3999 more than one successor.
3983 4000
3984 4001 A changeset that has been rewritten in multiple different ways is called
3985 4002 "divergent". Such changesets have multiple successor sets (each of which
3986 4003 may also be split, i.e. have multiple successors).
3987 4004
3988 4005 Results are displayed as follows::
3989 4006
3990 4007 <rev1>
3991 4008 <successors-1A>
3992 4009 <rev2>
3993 4010 <successors-2A>
3994 4011 <successors-2B1> <successors-2B2> <successors-2B3>
3995 4012
3996 4013 Here rev2 has two possible (i.e. divergent) successors sets. The first
3997 4014 holds one element, whereas the second holds three (i.e. the changeset has
3998 4015 been split).
3999 4016 """
4000 4017 # passed to successorssets caching computation from one call to another
4001 4018 cache = {}
4002 4019 ctx2str = bytes
4003 4020 node2str = short
4004 4021 for rev in scmutil.revrange(repo, revs):
4005 4022 ctx = repo[rev]
4006 4023 ui.write(b'%s\n' % ctx2str(ctx))
4007 4024 for succsset in obsutil.successorssets(
4008 4025 repo, ctx.node(), closest=opts['closest'], cache=cache
4009 4026 ):
4010 4027 if succsset:
4011 4028 ui.write(b' ')
4012 4029 ui.write(node2str(succsset[0]))
4013 4030 for node in succsset[1:]:
4014 4031 ui.write(b' ')
4015 4032 ui.write(node2str(node))
4016 4033 ui.write(b'\n')
4017 4034
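# A minimal API sketch mirroring the loop above (assumes `repo` is a local
# repository whose working directory parent has been rewritten); the cache
# dict is shared across calls exactly as in the command body:
#
#     cache = {}
#     node = repo[b'.'].node()
#     for succsset in obsutil.successorssets(repo, node, cache=cache):
#         ui.write(b' '.join(short(n) for n in succsset) + b'\n')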
4018 4035
4019 4036 @command(b'debugtagscache', [])
4020 4037 def debugtagscache(ui, repo):
4021 4038 """display the contents of .hg/cache/hgtagsfnodes1"""
4022 4039 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
4023 4040 flog = repo.file(b'.hgtags')
4024 4041 for r in repo:
4025 4042 node = repo[r].node()
4026 4043 tagsnode = cache.getfnode(node, computemissing=False)
4027 4044 if tagsnode:
4028 4045 tagsnodedisplay = hex(tagsnode)
4029 4046 if not flog.hasnode(tagsnode):
4030 4047 tagsnodedisplay += b' (unknown node)'
4031 4048 elif tagsnode is None:
4032 4049 tagsnodedisplay = b'missing'
4033 4050 else:
4034 4051 tagsnodedisplay = b'invalid'
4035 4052
4036 4053 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4037 4054
4038 4055
4039 4056 @command(
4040 4057 b'debugtemplate',
4041 4058 [
4042 4059 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
4043 4060 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
4044 4061 ],
4045 4062 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
4046 4063 optionalrepo=True,
4047 4064 )
4048 4065 def debugtemplate(ui, repo, tmpl, **opts):
4049 4066 """parse and apply a template
4050 4067
4051 4068 If -r/--rev is given, the template is processed as a log template and
4052 4069 applied to the given changesets. Otherwise, it is processed as a generic
4053 4070 template.
4054 4071
4055 4072 Use --verbose to print the parsed tree.
4056 4073 """
4057 4074 revs = None
4058 4075 if opts['rev']:
4059 4076 if repo is None:
4060 4077 raise error.RepoError(
4061 4078 _(b'there is no Mercurial repository here (.hg not found)')
4062 4079 )
4063 4080 revs = scmutil.revrange(repo, opts['rev'])
4064 4081
4065 4082 props = {}
4066 4083 for d in opts['define']:
4067 4084 try:
4068 4085 k, v = (e.strip() for e in d.split(b'=', 1))
4069 4086 if not k or k == b'ui':
4070 4087 raise ValueError
4071 4088 props[k] = v
4072 4089 except ValueError:
4073 4090 raise error.Abort(_(b'malformed keyword definition: %s') % d)
4074 4091
4075 4092 if ui.verbose:
4076 4093 aliases = ui.configitems(b'templatealias')
4077 4094 tree = templater.parse(tmpl)
4078 4095 ui.note(templater.prettyformat(tree), b'\n')
4079 4096 newtree = templater.expandaliases(tree, aliases)
4080 4097 if newtree != tree:
4081 4098 ui.notenoi18n(
4082 4099 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
4083 4100 )
4084 4101
4085 4102 if revs is None:
4086 4103 tres = formatter.templateresources(ui, repo)
4087 4104 t = formatter.maketemplater(ui, tmpl, resources=tres)
4088 4105 if ui.verbose:
4089 4106 kwds, funcs = t.symbolsuseddefault()
4090 4107 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4091 4108 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4092 4109 ui.write(t.renderdefault(props))
4093 4110 else:
4094 4111 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
4095 4112 if ui.verbose:
4096 4113 kwds, funcs = displayer.t.symbolsuseddefault()
4097 4114 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
4098 4115 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
4099 4116 for r in revs:
4100 4117 displayer.show(repo[r], **pycompat.strkwargs(props))
4101 4118 displayer.close()
4102 4119
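# Illustrative invocations (template text is arbitrary): render a generic
# template with a keyword defined via -D, then a log template applied to
# the working directory parent:
#
#     $ hg debugtemplate -D greeting=hello '{greeting}\n'
#     $ hg debugtemplate -r . '{node|short} {desc|firstline}\n'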
4103 4120
4104 4121 @command(
4105 4122 b'debuguigetpass',
4106 4123 [
4107 4124 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4108 4125 ],
4109 4126 _(b'[-p TEXT]'),
4110 4127 norepo=True,
4111 4128 )
4112 4129 def debuguigetpass(ui, prompt=b''):
4113 4130 """show prompt to type password"""
4114 4131 r = ui.getpass(prompt)
4115 4132 if r is None:
4116 4133 r = b"<default response>"
4117 4134 ui.writenoi18n(b'response: %s\n' % r)
4118 4135
4119 4136
4120 4137 @command(
4121 4138 b'debuguiprompt',
4122 4139 [
4123 4140 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4124 4141 ],
4125 4142 _(b'[-p TEXT]'),
4126 4143 norepo=True,
4127 4144 )
4128 4145 def debuguiprompt(ui, prompt=b''):
4129 4146 """show plain prompt"""
4130 4147 r = ui.prompt(prompt)
4131 4148 ui.writenoi18n(b'response: %s\n' % r)
4132 4149
4133 4150
4134 4151 @command(b'debugupdatecaches', [])
4135 4152 def debugupdatecaches(ui, repo, *pats, **opts):
4136 4153 """warm all known caches in the repository"""
4137 4154 with repo.wlock(), repo.lock():
4138 4155 repo.updatecaches(caches=repository.CACHES_ALL)
4139 4156
4140 4157
4141 4158 @command(
4142 4159 b'debugupgraderepo',
4143 4160 [
4144 4161 (
4145 4162 b'o',
4146 4163 b'optimize',
4147 4164 [],
4148 4165 _(b'extra optimization to perform'),
4149 4166 _(b'NAME'),
4150 4167 ),
4151 4168 (b'', b'run', False, _(b'performs an upgrade')),
4152 4169 (b'', b'backup', True, _(b'keep the old repository content around')),
4153 4170 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4154 4171 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4155 4172 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4156 4173 ],
4157 4174 )
4158 4175 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4159 4176 """upgrade a repository to use different features
4160 4177
4161 4178 If no arguments are specified, the repository is evaluated for upgrade
4162 4179 and a list of problems and potential optimizations is printed.
4163 4180
4164 4181 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4165 4182 can be influenced via additional arguments. More details will be provided
4166 4183 by the command output when run without ``--run``.
4167 4184
4168 4185 During the upgrade, the repository will be locked and no writes will be
4169 4186 allowed.
4170 4187
4171 4188 At the end of the upgrade, the repository may not be readable while new
4172 4189 repository data is swapped in. This window will be as long as it takes to
4173 4190 rename some directories inside the ``.hg`` directory. On most machines, this
4174 4191 should complete almost instantaneously and the chances of a consumer being
4175 4192 unable to access the repository should be low.
4176 4193
4177 4194 By default, all revlogs will be upgraded. You can restrict this using flags
4178 4195 such as `--manifest`:
4179 4196
4180 4197 * `--manifest`: only optimize the manifest
4181 4198 * `--no-manifest`: optimize all revlogs but the manifest
4182 4199 * `--changelog`: optimize the changelog only
4183 4200 * `--no-changelog --no-manifest`: optimize filelogs only
4184 4201 * `--filelogs`: optimize the filelogs only
4185 4202 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4186 4203 """
4187 4204 return upgrade.upgraderepo(
4188 4205 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4189 4206 )
4190 4207
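# Illustrative usage: run without --run first to see the report, then
# perform the upgrade while keeping the old store around (--backup is the
# default; it is spelled out here for emphasis):
#
#     $ hg debugupgraderepo
#     $ hg debugupgraderepo --run --backup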
4191 4208
4192 4209 @command(
4193 4210 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4194 4211 )
4195 4212 def debugwalk(ui, repo, *pats, **opts):
4196 4213 """show how files match on given patterns"""
4197 4214 opts = pycompat.byteskwargs(opts)
4198 4215 m = scmutil.match(repo[None], pats, opts)
4199 4216 if ui.verbose:
4200 4217 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4201 4218 items = list(repo[None].walk(m))
4202 4219 if not items:
4203 4220 return
4204 4221 f = lambda fn: fn
4205 4222 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4206 4223 f = lambda fn: util.normpath(fn)
4207 4224 fmt = b'f %%-%ds %%-%ds %%s' % (
4208 4225 max([len(abs) for abs in items]),
4209 4226 max([len(repo.pathto(abs)) for abs in items]),
4210 4227 )
4211 4228 for abs in items:
4212 4229 line = fmt % (
4213 4230 abs,
4214 4231 f(repo.pathto(abs)),
4215 4232 m.exact(abs) and b'exact' or b'',
4216 4233 )
4217 4234 ui.write(b"%s\n" % line.rstrip())
4218 4235
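# Illustrative usage (the pattern is arbitrary): show which files match a
# glob and, with -v, dump the matcher itself:
#
#     $ hg debugwalk -v 'glob:**.py'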
4219 4236
4220 4237 @command(b'debugwhyunstable', [], _(b'REV'))
4221 4238 def debugwhyunstable(ui, repo, rev):
4222 4239 """explain instabilities of a changeset"""
4223 4240 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4224 4241 dnodes = b''
4225 4242 if entry.get(b'divergentnodes'):
4226 4243 dnodes = (
4227 4244 b' '.join(
4228 4245 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4229 4246 for ctx in entry[b'divergentnodes']
4230 4247 )
4231 4248 + b' '
4232 4249 )
4233 4250 ui.write(
4234 4251 b'%s: %s%s %s\n'
4235 4252 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4236 4253 )
4237 4254
4238 4255
4239 4256 @command(
4240 4257 b'debugwireargs',
4241 4258 [
4242 4259 (b'', b'three', b'', b'three'),
4243 4260 (b'', b'four', b'', b'four'),
4244 4261 (b'', b'five', b'', b'five'),
4245 4262 ]
4246 4263 + cmdutil.remoteopts,
4247 4264 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4248 4265 norepo=True,
4249 4266 )
4250 4267 def debugwireargs(ui, repopath, *vals, **opts):
4251 4268 opts = pycompat.byteskwargs(opts)
4252 4269 repo = hg.peer(ui, opts, repopath)
4253 4270 try:
4254 4271 for opt in cmdutil.remoteopts:
4255 4272 del opts[opt[1]]
4256 4273 args = {}
4257 4274 for k, v in pycompat.iteritems(opts):
4258 4275 if v:
4259 4276 args[k] = v
4260 4277 args = pycompat.strkwargs(args)
4261 4278 # run twice to check that we don't mess up the stream for the next command
4262 4279 res1 = repo.debugwireargs(*vals, **args)
4263 4280 res2 = repo.debugwireargs(*vals, **args)
4264 4281 ui.write(b"%s\n" % res1)
4265 4282 if res1 != res2:
4266 4283 ui.warn(b"%s\n" % res2)
4267 4284 finally:
4268 4285 repo.close()
4269 4286
4270 4287
4271 4288 def _parsewirelangblocks(fh):
4272 4289 activeaction = None
4273 4290 blocklines = []
4274 4291 lastindent = 0
4275 4292
4276 4293 for line in fh:
4277 4294 line = line.rstrip()
4278 4295 if not line:
4279 4296 continue
4280 4297
4281 4298 if line.startswith(b'#'):
4282 4299 continue
4283 4300
4284 4301 if not line.startswith(b' '):
4285 4302 # New block. Flush previous one.
4286 4303 if activeaction:
4287 4304 yield activeaction, blocklines
4288 4305
4289 4306 activeaction = line
4290 4307 blocklines = []
4291 4308 lastindent = 0
4292 4309 continue
4293 4310
4294 4311 # Else we start with an indent.
4295 4312
4296 4313 if not activeaction:
4297 4314 raise error.Abort(_(b'indented line outside of block'))
4298 4315
4299 4316 indent = len(line) - len(line.lstrip())
4300 4317
4301 4318 # If this line is indented more than the last line, concatenate it.
4302 4319 if indent > lastindent and blocklines:
4303 4320 blocklines[-1] += line.lstrip()
4304 4321 else:
4305 4322 blocklines.append(line)
4306 4323 lastindent = indent
4307 4324
4308 4325 # Flush last block.
4309 4326 if activeaction:
4310 4327 yield activeaction, blocklines
4311 4328
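# For example (illustrative input), feeding the parser:
#
#     command listkeys
#         namespace bookmarks
#     flush
#
# yields (b'command listkeys', [b'    namespace bookmarks']) and then
# (b'flush', []); a line indented deeper than its predecessor is folded
# into the previous block line, per the continuation rule above.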
4312 4329
4313 4330 @command(
4314 4331 b'debugwireproto',
4315 4332 [
4316 4333 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4317 4334 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4318 4335 (
4319 4336 b'',
4320 4337 b'noreadstderr',
4321 4338 False,
4322 4339 _(b'do not read from stderr of the remote'),
4323 4340 ),
4324 4341 (
4325 4342 b'',
4326 4343 b'nologhandshake',
4327 4344 False,
4328 4345 _(b'do not log I/O related to the peer handshake'),
4329 4346 ),
4330 4347 ]
4331 4348 + cmdutil.remoteopts,
4332 4349 _(b'[PATH]'),
4333 4350 optionalrepo=True,
4334 4351 )
4335 4352 def debugwireproto(ui, repo, path=None, **opts):
4336 4353 """send wire protocol commands to a server
4337 4354
4338 4355 This command can be used to issue wire protocol commands to remote
4339 4356 peers and to debug the raw data being exchanged.
4340 4357
4341 4358 ``--localssh`` will start an SSH server against the current repository
4342 4359 and connect to that. By default, the connection will perform a handshake
4343 4360 and establish an appropriate peer instance.
4344 4361
4345 4362 ``--peer`` can be used to bypass the handshake protocol and construct a
4346 4363 peer instance using the specified class type. Valid values are ``raw``,
4347 4364 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4348 4365 raw data payloads and don't support higher-level command actions.
4349 4366
4350 4367 ``--noreadstderr`` can be used to disable automatic reading from stderr
4351 4368 of the peer (for SSH connections only). Disabling automatic reading of
4352 4369 stderr is useful for making output more deterministic.
4353 4370
4354 4371 Commands are issued via a mini language which is specified via stdin.
4355 4372 The language consists of individual actions to perform. An action is
4356 4373 defined by a block. A block is defined as a line with no leading
4357 4374 space followed by 0 or more lines with leading space. Blocks are
4358 4375 effectively a high-level command with additional metadata.
4359 4376
4360 4377 Lines beginning with ``#`` are ignored.
4361 4378
4362 4379 The following sections denote available actions.
4363 4380
4364 4381 raw
4365 4382 ---
4366 4383
4367 4384 Send raw data to the server.
4368 4385
4369 4386 The block payload contains the raw data to send as one atomic send
4370 4387 operation. The data may not actually be delivered in a single system
4371 4388 call: it depends on the abilities of the transport being used.
4372 4389
4373 4390 Each line in the block is de-indented and concatenated. Then, that
4374 4391 value is evaluated as a Python b'' literal. This allows the use of
4375 4392 backslash escaping, etc.
4376 4393
4377 4394 raw+
4378 4395 ----
4379 4396
4380 4397 Behaves like ``raw`` except flushes output afterwards.
4381 4398
4382 4399 command <X>
4383 4400 -----------
4384 4401
4385 4402 Send a request to run a named command, whose name follows the ``command``
4386 4403 string.
4387 4404
4388 4405 Arguments to the command are defined as lines in this block. The format of
4389 4406 each line is ``<key> <value>``. e.g.::
4390 4407
4391 4408 command listkeys
4392 4409 namespace bookmarks
4393 4410
4394 4411 If the value begins with ``eval:``, it will be interpreted as a Python
4395 4412 literal expression. Otherwise values are interpreted as Python b'' literals.
4396 4413 This allows sending complex types and encoding special byte sequences via
4397 4414 backslash escaping.
4398 4415
4399 4416 The following arguments have special meaning:
4400 4417
4401 4418 ``PUSHFILE``
4402 4419 When defined, the *push* mechanism of the peer will be used instead
4403 4420 of the static request-response mechanism and the content of the
4404 4421 file specified in the value of this argument will be sent as the
4405 4422 command payload.
4406 4423
4407 4424 This can be used to submit a local bundle file to the remote.
4408 4425
4409 4426 batchbegin
4410 4427 ----------
4411 4428
4412 4429 Instruct the peer to begin a batched send.
4413 4430
4414 4431 All ``command`` blocks are queued for execution until the next
4415 4432 ``batchsubmit`` block.
4416 4433
4417 4434 batchsubmit
4418 4435 -----------
4419 4436
4420 4437 Submit previously queued ``command`` blocks as a batch request.
4421 4438
4422 4439 This action MUST be paired with a ``batchbegin`` action.
4423 4440
4424 4441 httprequest <method> <path>
4425 4442 ---------------------------
4426 4443
4427 4444 (HTTP peer only)
4428 4445
4429 4446 Send an HTTP request to the peer.
4430 4447
4431 4448 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4432 4449
4433 4450 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4434 4451 headers to add to the request. e.g. ``Accept: foo``.
4435 4452
4436 4453 The following arguments are special:
4437 4454
4438 4455 ``BODYFILE``
4439 4456 The content of the file defined as the value to this argument will be
4440 4457 transferred verbatim as the HTTP request body.
4441 4458
4442 4459 ``frame <type> <flags> <payload>``
4443 4460 Send a unified protocol frame as part of the request body.
4444 4461
4445 4462 All frames will be collected and sent as the body to the HTTP
4446 4463 request.
4447 4464
4448 4465 close
4449 4466 -----
4450 4467
4451 4468 Close the connection to the server.
4452 4469
4453 4470 flush
4454 4471 -----
4455 4472
4456 4473 Flush data written to the server.
4457 4474
4458 4475 readavailable
4459 4476 -------------
4460 4477
4461 4478 Close the write end of the connection and read all available data from
4462 4479 the server.
4463 4480
4464 4481 If the connection to the server encompasses multiple pipes, we poll both
4465 4482 pipes and read available data.
4466 4483
4467 4484 readline
4468 4485 --------
4469 4486
4470 4487 Read a line of output from the server. If there are multiple output
4471 4488 pipes, reads only the main pipe.
4472 4489
4473 4490 ereadline
4474 4491 ---------
4475 4492
4476 4493 Like ``readline``, but read from the stderr pipe, if available.
4477 4494
4478 4495 read <X>
4479 4496 --------
4480 4497
4481 4498 ``read()`` N bytes from the server's main output pipe.
4482 4499
4483 4500 eread <X>
4484 4501 ---------
4485 4502
4486 4503 ``read()`` N bytes from the server's stderr pipe, if available.
4487 4504
4488 4505 Specifying Unified Frame-Based Protocol Frames
4489 4506 ----------------------------------------------
4490 4507
4491 4508 It is possible to emit a *Unified Frame-Based Protocol* by using special
4492 4509 syntax.
4493 4510
4494 4511 A frame is composed of a type, flags, and a payload. These can be parsed
4495 4512 from a string of the form:
4496 4513
4497 4514 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4498 4515
4499 4516 ``request-id`` and ``stream-id`` are integers defining the request and
4500 4517 stream identifiers.
4501 4518
4502 4519 ``type`` can be an integer value for the frame type or the string name
4503 4520 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4504 4521 ``command-name``.
4505 4522
4506 4523 ``stream-flags`` and ``flags`` are ``|``-delimited lists of flag
4507 4524 components. Each component (and there can be just one) can be an integer
4508 4525 or a flag name for stream flags or frame flags, respectively. Values are
4509 4526 resolved to integers and then bitwise OR'd together.
4510 4527
4511 4528 ``payload`` represents the raw frame payload. If it begins with
4512 4529 ``cbor:``, the following string is evaluated as Python code and the
4513 4530 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4514 4531 as a Python byte string literal.
4515 4532 """
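# An illustrative stdin script combining the actions documented above (the
# listkeys example comes straight from the docstring; the batch wraps two
# ordinary commands):
#
#     command listkeys
#         namespace bookmarks
#     batchbegin
#     command heads
#     command listkeys
#         namespace phases
#     batchsubmit
#     close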
4516 4533 opts = pycompat.byteskwargs(opts)
4517 4534
4518 4535 if opts[b'localssh'] and not repo:
4519 4536 raise error.Abort(_(b'--localssh requires a repository'))
4520 4537
4521 4538 if opts[b'peer'] and opts[b'peer'] not in (
4522 4539 b'raw',
4523 4540 b'http2',
4524 4541 b'ssh1',
4525 4542 b'ssh2',
4526 4543 ):
4527 4544 raise error.Abort(
4528 4545 _(b'invalid value for --peer'),
4529 4546 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4530 4547 )
4531 4548
4532 4549 if path and opts[b'localssh']:
4533 4550 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4534 4551
4535 4552 if ui.interactive():
4536 4553 ui.write(_(b'(waiting for commands on stdin)\n'))
4537 4554
4538 4555 blocks = list(_parsewirelangblocks(ui.fin))
4539 4556
4540 4557 proc = None
4541 4558 stdin = None
4542 4559 stdout = None
4543 4560 stderr = None
4544 4561 opener = None
4545 4562
4546 4563 if opts[b'localssh']:
4547 4564 # We start the SSH server in its own process so there is process
4548 4565 # separation. This prevents a whole class of potential bugs around
4549 4566 # shared state from interfering with server operation.
4550 4567 args = procutil.hgcmd() + [
4551 4568 b'-R',
4552 4569 repo.root,
4553 4570 b'debugserve',
4554 4571 b'--sshstdio',
4555 4572 ]
4556 4573 proc = subprocess.Popen(
4557 4574 pycompat.rapply(procutil.tonativestr, args),
4558 4575 stdin=subprocess.PIPE,
4559 4576 stdout=subprocess.PIPE,
4560 4577 stderr=subprocess.PIPE,
4561 4578 bufsize=0,
4562 4579 )
4563 4580
4564 4581 stdin = proc.stdin
4565 4582 stdout = proc.stdout
4566 4583 stderr = proc.stderr
4567 4584
4568 4585 # We turn the pipes into observers so we can log I/O.
4569 4586 if ui.verbose or opts[b'peer'] == b'raw':
4570 4587 stdin = util.makeloggingfileobject(
4571 4588 ui, proc.stdin, b'i', logdata=True
4572 4589 )
4573 4590 stdout = util.makeloggingfileobject(
4574 4591 ui, proc.stdout, b'o', logdata=True
4575 4592 )
4576 4593 stderr = util.makeloggingfileobject(
4577 4594 ui, proc.stderr, b'e', logdata=True
4578 4595 )
4579 4596
4580 4597 # --localssh also implies the peer connection settings.
4581 4598
4582 4599 url = b'ssh://localserver'
4583 4600 autoreadstderr = not opts[b'noreadstderr']
4584 4601
4585 4602 if opts[b'peer'] == b'ssh1':
4586 4603 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4587 4604 peer = sshpeer.sshv1peer(
4588 4605 ui,
4589 4606 url,
4590 4607 proc,
4591 4608 stdin,
4592 4609 stdout,
4593 4610 stderr,
4594 4611 None,
4595 4612 autoreadstderr=autoreadstderr,
4596 4613 )
4597 4614 elif opts[b'peer'] == b'ssh2':
4598 4615 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4599 4616 peer = sshpeer.sshv2peer(
4600 4617 ui,
4601 4618 url,
4602 4619 proc,
4603 4620 stdin,
4604 4621 stdout,
4605 4622 stderr,
4606 4623 None,
4607 4624 autoreadstderr=autoreadstderr,
4608 4625 )
4609 4626 elif opts[b'peer'] == b'raw':
4610 4627 ui.write(_(b'using raw connection to peer\n'))
4611 4628 peer = None
4612 4629 else:
4613 4630 ui.write(_(b'creating ssh peer from handshake results\n'))
4614 4631 peer = sshpeer.makepeer(
4615 4632 ui,
4616 4633 url,
4617 4634 proc,
4618 4635 stdin,
4619 4636 stdout,
4620 4637 stderr,
4621 4638 autoreadstderr=autoreadstderr,
4622 4639 )
4623 4640
4624 4641 elif path:
4625 4642 # We bypass hg.peer() so we can proxy the sockets.
4626 4643 # TODO consider not doing this because we skip
4627 4644 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4628 4645 u = urlutil.url(path)
4629 4646 if u.scheme != b'http':
4630 4647 raise error.Abort(_(b'only http:// paths are currently supported'))
4631 4648
4632 4649 url, authinfo = u.authinfo()
4633 4650 openerargs = {
4634 4651 'useragent': b'Mercurial debugwireproto',
4635 4652 }
4636 4653
4637 4654 # Turn pipes/sockets into observers so we can log I/O.
4638 4655 if ui.verbose:
4639 4656 openerargs.update(
4640 4657 {
4641 4658 'loggingfh': ui,
4642 4659 'loggingname': b's',
4643 4660 'loggingopts': {
4644 4661 'logdata': True,
4645 4662 'logdataapis': False,
4646 4663 },
4647 4664 }
4648 4665 )
4649 4666
4650 4667 if ui.debugflag:
4651 4668 openerargs['loggingopts']['logdataapis'] = True
4652 4669
4653 4670 # Don't send default headers when in raw mode. This allows us to
4654 4671 # bypass most of the behavior of our URL handling code so we can
4655 4672 # have near complete control over what's sent on the wire.
4656 4673 if opts[b'peer'] == b'raw':
4657 4674 openerargs['sendaccept'] = False
4658 4675
4659 4676 opener = urlmod.opener(ui, authinfo, **openerargs)
4660 4677
4661 4678 if opts[b'peer'] == b'http2':
4662 4679 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4663 4680 # We go through makepeer() because we need an API descriptor for
4664 4681 # the peer instance to be useful.
4665 4682 maybe_silent = (
4666 4683 ui.silent()
4667 4684 if opts[b'nologhandshake']
4668 4685 else util.nullcontextmanager()
4669 4686 )
4670 4687 with maybe_silent, ui.configoverride(
4671 4688 {(b'experimental', b'httppeer.advertise-v2'): True}
4672 4689 ):
4673 4690 peer = httppeer.makepeer(ui, path, opener=opener)
4674 4691
4675 4692 if not isinstance(peer, httppeer.httpv2peer):
4676 4693 raise error.Abort(
4677 4694 _(
4678 4695 b'could not instantiate HTTP peer for '
4679 4696 b'wire protocol version 2'
4680 4697 ),
4681 4698 hint=_(
4682 4699 b'the server may not have the feature '
4683 4700 b'enabled or is not allowing this '
4684 4701 b'client version'
4685 4702 ),
4686 4703 )
4687 4704
4688 4705 elif opts[b'peer'] == b'raw':
4689 4706 ui.write(_(b'using raw connection to peer\n'))
4690 4707 peer = None
4691 4708 elif opts[b'peer']:
4692 4709 raise error.Abort(
4693 4710 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4694 4711 )
4695 4712 else:
4696 4713 peer = httppeer.makepeer(ui, path, opener=opener)
4697 4714
4698 4715 # We /could/ populate stdin/stdout with sock.makefile()...
4699 4716 else:
4700 4717 raise error.Abort(_(b'unsupported connection configuration'))
4701 4718
4702 4719 batchedcommands = None
4703 4720
4704 4721 # Now perform actions based on the parsed wire language instructions.
4705 4722 for action, lines in blocks:
4706 4723 if action in (b'raw', b'raw+'):
4707 4724 if not stdin:
4708 4725 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4709 4726
4710 4727 # Concatenate the data together.
4711 4728 data = b''.join(l.lstrip() for l in lines)
4712 4729 data = stringutil.unescapestr(data)
4713 4730 stdin.write(data)
4714 4731
4715 4732 if action == b'raw+':
4716 4733 stdin.flush()
4717 4734 elif action == b'flush':
4718 4735 if not stdin:
4719 4736 raise error.Abort(_(b'cannot call flush on this peer'))
4720 4737 stdin.flush()
4721 4738 elif action.startswith(b'command'):
4722 4739 if not peer:
4723 4740 raise error.Abort(
4724 4741 _(
4725 4742 b'cannot send commands unless peer instance '
4726 4743 b'is available'
4727 4744 )
4728 4745 )
4729 4746
4730 4747 command = action.split(b' ', 1)[1]
4731 4748
4732 4749 args = {}
4733 4750 for line in lines:
4734 4751 # We need to allow empty values.
4735 4752 fields = line.lstrip().split(b' ', 1)
4736 4753 if len(fields) == 1:
4737 4754 key = fields[0]
4738 4755 value = b''
4739 4756 else:
4740 4757 key, value = fields
4741 4758
4742 4759 if value.startswith(b'eval:'):
4743 4760 value = stringutil.evalpythonliteral(value[5:])
4744 4761 else:
4745 4762 value = stringutil.unescapestr(value)
4746 4763
4747 4764 args[key] = value
4748 4765
4749 4766 if batchedcommands is not None:
4750 4767 batchedcommands.append((command, args))
4751 4768 continue
4752 4769
4753 4770 ui.status(_(b'sending %s command\n') % command)
4754 4771
4755 4772 if b'PUSHFILE' in args:
4756 4773 with open(args[b'PUSHFILE'], 'rb') as fh:
4757 4774 del args[b'PUSHFILE']
4758 4775 res, output = peer._callpush(
4759 4776 command, fh, **pycompat.strkwargs(args)
4760 4777 )
4761 4778 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4762 4779 ui.status(
4763 4780 _(b'remote output: %s\n') % stringutil.escapestr(output)
4764 4781 )
4765 4782 else:
4766 4783 with peer.commandexecutor() as e:
4767 4784 res = e.callcommand(command, args).result()
4768 4785
4769 4786 if isinstance(res, wireprotov2peer.commandresponse):
4770 4787 val = res.objects()
4771 4788 ui.status(
4772 4789 _(b'response: %s\n')
4773 4790 % stringutil.pprint(val, bprefix=True, indent=2)
4774 4791 )
4775 4792 else:
4776 4793 ui.status(
4777 4794 _(b'response: %s\n')
4778 4795 % stringutil.pprint(res, bprefix=True, indent=2)
4779 4796 )
4780 4797
4781 4798 elif action == b'batchbegin':
4782 4799 if batchedcommands is not None:
4783 4800 raise error.Abort(_(b'nested batchbegin not allowed'))
4784 4801
4785 4802 batchedcommands = []
4786 4803 elif action == b'batchsubmit':
4787 4804 # There is a batching API we could go through. But it would be
4788 4805 # difficult to normalize requests into function calls. It is easier
4789 4806 # to bypass this layer and normalize to commands + args.
4790 4807 ui.status(
4791 4808 _(b'sending batch with %d sub-commands\n')
4792 4809 % len(batchedcommands)
4793 4810 )
4794 4811 assert peer is not None
4795 4812 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4796 4813 ui.status(
4797 4814 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4798 4815 )
4799 4816
4800 4817 batchedcommands = None
4801 4818
4802 4819 elif action.startswith(b'httprequest '):
4803 4820 if not opener:
4804 4821 raise error.Abort(
4805 4822 _(b'cannot use httprequest without an HTTP peer')
4806 4823 )
4807 4824
4808 4825 request = action.split(b' ', 2)
4809 4826 if len(request) != 3:
4810 4827 raise error.Abort(
4811 4828 _(
4812 4829 b'invalid httprequest: expected format is '
4813 4830 b'"httprequest <method> <path>'
4814 4831 )
4815 4832 )
4816 4833
4817 4834 method, httppath = request[1:]
4818 4835 headers = {}
4819 4836 body = None
4820 4837 frames = []
4821 4838 for line in lines:
4822 4839 line = line.lstrip()
4823 4840 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4824 4841 if m:
4825 4842 # Headers need to use native strings.
4826 4843 key = pycompat.strurl(m.group(1))
4827 4844 value = pycompat.strurl(m.group(2))
4828 4845 headers[key] = value
4829 4846 continue
4830 4847
4831 4848 if line.startswith(b'BODYFILE '):
4832 4849 with open(line.split(b' ', 1)[1], b'rb') as fh:
4833 4850 body = fh.read()
4834 4851 elif line.startswith(b'frame '):
4835 4852 frame = wireprotoframing.makeframefromhumanstring(
4836 4853 line[len(b'frame ') :]
4837 4854 )
4838 4855
4839 4856 frames.append(frame)
4840 4857 else:
4841 4858 raise error.Abort(
4842 4859 _(b'unknown argument to httprequest: %s') % line
4843 4860 )
4844 4861
4845 4862 url = path + httppath
4846 4863
4847 4864 if frames:
4848 4865 body = b''.join(bytes(f) for f in frames)
4849 4866
4850 4867 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4851 4868
4852 4869 # urllib.Request insists on using has_data() as a proxy for
4853 4870 # determining the request method. Override that to use our
4854 4871 # explicitly requested method.
4855 4872 req.get_method = lambda: pycompat.sysstr(method)
4856 4873
4857 4874 try:
4858 4875 res = opener.open(req)
4859 4876 body = res.read()
4860 4877 except util.urlerr.urlerror as e:
4861 4878 # read() method must be called, but only exists in Python 2
4862 4879 getattr(e, 'read', lambda: None)()
4863 4880 continue
4864 4881
4865 4882 ct = res.headers.get('Content-Type')
4866 4883 if ct == 'application/mercurial-cbor':
4867 4884 ui.write(
4868 4885 _(b'cbor> %s\n')
4869 4886 % stringutil.pprint(
4870 4887 cborutil.decodeall(body), bprefix=True, indent=2
4871 4888 )
4872 4889 )
4873 4890
4874 4891 elif action == b'close':
4875 4892 assert peer is not None
4876 4893 peer.close()
4877 4894 elif action == b'readavailable':
4878 4895 if not stdout or not stderr:
4879 4896 raise error.Abort(
4880 4897 _(b'readavailable not available on this peer')
4881 4898 )
4882 4899
4883 4900 stdin.close()
4884 4901 stdout.read()
4885 4902 stderr.read()
4886 4903
4887 4904 elif action == b'readline':
4888 4905 if not stdout:
4889 4906 raise error.Abort(_(b'readline not available on this peer'))
4890 4907 stdout.readline()
4891 4908 elif action == b'ereadline':
4892 4909 if not stderr:
4893 4910 raise error.Abort(_(b'ereadline not available on this peer'))
4894 4911 stderr.readline()
4895 4912 elif action.startswith(b'read '):
4896 4913 count = int(action.split(b' ', 1)[1])
4897 4914 if not stdout:
4898 4915 raise error.Abort(_(b'read not available on this peer'))
4899 4916 stdout.read(count)
4900 4917 elif action.startswith(b'eread '):
4901 4918 count = int(action.split(b' ', 1)[1])
4902 4919 if not stderr:
4903 4920 raise error.Abort(_(b'eread not available on this peer'))
4904 4921 stderr.read(count)
4905 4922 else:
4906 4923 raise error.Abort(_(b'unknown action: %s') % action)
4907 4924
4908 4925 if batchedcommands is not None:
4909 4926 raise error.Abort(_(b'unclosed "batchbegin" request'))
4910 4927
4911 4928 if peer:
4912 4929 peer.close()
4913 4930
4914 4931 if proc:
4915 4932 proc.kill()
@@ -1,747 +1,755
1 1 # censor code related to censoring revision
2 2 # coding: utf8
3 3 #
4 4 # Copyright 2021 Pierre-Yves David <pierre-yves.david@octobus.net>
5 5 # Copyright 2015 Google, Inc <martinvonz@google.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 import binascii
11 11 import contextlib
12 12 import os
13 13 import struct
14 14
15 15 from ..node import (
16 16 nullrev,
17 17 )
18 18 from .constants import (
19 19 COMP_MODE_PLAIN,
20 20 ENTRY_DATA_COMPRESSED_LENGTH,
21 21 ENTRY_DATA_COMPRESSION_MODE,
22 22 ENTRY_DATA_OFFSET,
23 23 ENTRY_DATA_UNCOMPRESSED_LENGTH,
24 24 ENTRY_DELTA_BASE,
25 25 ENTRY_LINK_REV,
26 26 ENTRY_NODE_ID,
27 27 ENTRY_PARENT_1,
28 28 ENTRY_PARENT_2,
29 29 ENTRY_SIDEDATA_COMPRESSED_LENGTH,
30 30 ENTRY_SIDEDATA_COMPRESSION_MODE,
31 31 ENTRY_SIDEDATA_OFFSET,
32 32 REVLOGV0,
33 33 REVLOGV1,
34 34 )
35 35 from ..i18n import _
36 36
37 37 from .. import (
38 38 error,
39 39 pycompat,
40 40 revlogutils,
41 41 util,
42 42 )
43 43 from ..utils import (
44 44 storageutil,
45 45 )
46 46 from . import (
47 47 constants,
48 48 deltas,
49 49 )
50 50
51 51
52 52 def v1_censor(rl, tr, censornode, tombstone=b''):
53 53 """censors a revision in a "version 1" revlog"""
54 54 assert rl._format_version == constants.REVLOGV1, rl._format_version
55 55
56 56 # avoid cycle
57 57 from .. import revlog
58 58
59 59 censorrev = rl.rev(censornode)
60 60 tombstone = storageutil.packmeta({b'censored': tombstone}, b'')
61 61
62 62 # Rewriting the revlog in place is hard. Our strategy for censoring is
63 63 # to create a new revlog, copy all revisions to it, then replace the
64 64 # revlogs on transaction close.
65 65 #
66 66 # This is a bit dangerous. We could easily have a mismatch of state.
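# The replacement revlog is created under a 'tmpcensored' postfix and
# renamed over the original files once fully written (see below).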
67 67 newrl = revlog.revlog(
68 68 rl.opener,
69 69 target=rl.target,
70 70 radix=rl.radix,
71 71 postfix=b'tmpcensored',
72 72 censorable=True,
73 73 )
74 74 newrl._format_version = rl._format_version
75 75 newrl._format_flags = rl._format_flags
76 76 newrl._generaldelta = rl._generaldelta
77 77 newrl._parse_index = rl._parse_index
78 78
79 79 for rev in rl.revs():
80 80 node = rl.node(rev)
81 81 p1, p2 = rl.parents(node)
82 82
83 83 if rev == censorrev:
84 84 newrl.addrawrevision(
85 85 tombstone,
86 86 tr,
87 87 rl.linkrev(censorrev),
88 88 p1,
89 89 p2,
90 90 censornode,
91 91 constants.REVIDX_ISCENSORED,
92 92 )
93 93
94 94 if newrl.deltaparent(rev) != nullrev:
95 95 m = _(b'censored revision stored as delta; cannot censor')
96 96 h = _(
97 97 b'censoring of revlogs is not fully implemented;'
98 98 b' please report this bug'
99 99 )
100 100 raise error.Abort(m, hint=h)
101 101 continue
102 102
103 103 if rl.iscensored(rev):
104 104 if rl.deltaparent(rev) != nullrev:
105 105 m = _(
106 106 b'cannot censor due to censored '
107 107 b'revision having delta stored'
108 108 )
109 109 raise error.Abort(m)
110 110 rawtext = rl._chunk(rev)
111 111 else:
112 112 rawtext = rl.rawdata(rev)
113 113
114 114 newrl.addrawrevision(
115 115 rawtext, tr, rl.linkrev(rev), p1, p2, node, rl.flags(rev)
116 116 )
117 117
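# register transaction backups so a rollback can restore the original
# files, then swap the rewritten revlog into place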
118 118 tr.addbackup(rl._indexfile, location=b'store')
119 119 if not rl._inline:
120 120 tr.addbackup(rl._datafile, location=b'store')
121 121
122 122 rl.opener.rename(newrl._indexfile, rl._indexfile)
123 123 if not rl._inline:
124 124 rl.opener.rename(newrl._datafile, rl._datafile)
125 125
126 126 rl.clearcaches()
127 127 rl._loadindex()
128 128
129 129
130 130 def v2_censor(revlog, tr, censornode, tombstone=b''):
131 131 """censors a revision in a "version 2" revlog"""
132 132 assert revlog._format_version != REVLOGV0, revlog._format_version
133 133 assert revlog._format_version != REVLOGV1, revlog._format_version
134 134
135 135 censor_revs = {revlog.rev(censornode)}
136 136 _rewrite_v2(revlog, tr, censor_revs, tombstone)
137 137
138 138
139 139 def _rewrite_v2(revlog, tr, censor_revs, tombstone=b''):
140 140 """rewrite a revlog to censor some of its content
141 141
142 142 General principle
143 143
144 144 We create new revlog files (index/data/sidedata) to copy the content of
145 145 the existing data without the censored data.
146 146
147 147 We need to compute a new delta for any revision that used the censored
148 148 revision as its delta base. As the cumulative size of the new deltas may
149 149 be large, we store them in a temporary file until they are written to
150 150 their final destination.
151 151 
152 152 All data before the censored data can be blindly copied. The rest needs
153 153 to be copied as we go, and the associated index entries need adjustment.
154 154 """
155 155 assert revlog._format_version != REVLOGV0, revlog._format_version
156 156 assert revlog._format_version != REVLOGV1, revlog._format_version
157 157
158 158 old_index = revlog.index
159 159 docket = revlog._docket
160 160
161 161 tombstone = storageutil.packmeta({b'censored': tombstone}, b'')
162 162
163 163 first_excl_rev = min(censor_revs)
164 164
165 165 first_excl_entry = revlog.index[first_excl_rev]
166 166 index_cutoff = revlog.index.entry_size * first_excl_rev
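# ENTRY_DATA_OFFSET packs the 16-bit flags into its low bits, so
# shifting right by 16 yields the actual byte offset in the data file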
167 167 data_cutoff = first_excl_entry[ENTRY_DATA_OFFSET] >> 16
168 168 sidedata_cutoff = revlog.sidedata_cut_off(first_excl_rev)
169 169
170 170 with pycompat.unnamedtempfile(mode=b"w+b") as tmp_storage:
171 171 # rev → (new_base, data_start, data_end, compression_mode)
172 172 rewritten_entries = _precompute_rewritten_delta(
173 173 revlog,
174 174 old_index,
175 175 censor_revs,
176 176 tmp_storage,
177 177 )
178 178
179 179 all_files = _setup_new_files(
180 180 revlog,
181 181 index_cutoff,
182 182 data_cutoff,
183 183 sidedata_cutoff,
184 184 )
185 185
186 186 # we don't need to open the old index file since its content already
187 187 # exists in a usable form in `old_index`.
188 188 with all_files() as open_files:
189 189 (
190 190 old_data_file,
191 191 old_sidedata_file,
192 192 new_index_file,
193 193 new_data_file,
194 194 new_sidedata_file,
195 195 ) = open_files
196 196
197 197 # write the censored revision(s) and all subsequent revisions
198 198 
200 200 for rev in range(first_excl_rev, len(old_index)):
201 201 if rev in censor_revs:
202 202 _rewrite_censor(
203 203 revlog,
204 204 old_index,
205 205 open_files,
206 206 rev,
207 207 tombstone,
208 208 )
209 209 else:
210 210 _rewrite_simple(
211 211 revlog,
212 212 old_index,
213 213 open_files,
214 214 rev,
215 215 rewritten_entries,
216 216 tmp_storage,
217 217 )
218 218 docket.write(transaction=None, stripping=True)
219 219
220 220
221 221 def _precompute_rewritten_delta(
222 222 revlog,
223 223 old_index,
224 224 excluded_revs,
225 225 tmp_storage,
226 226 ):
227 227 """Compute new delta for revisions whose delta is based on revision that
228 228 will not survive as is.
229 229
230 230 Return a mapping: {rev → (new_base, data_start, data_end, compression_mode)}
231 231 """
232 232 dc = deltas.deltacomputer(revlog)
233 233 rewritten_entries = {}
234 234 first_excl_rev = min(excluded_revs)
235 235 with revlog._segmentfile._open_read() as dfh:
236 236 for rev in range(first_excl_rev, len(old_index)):
237 237 if rev in excluded_revs:
238 238 # this revision is being censored; it is rewritten as a tombstone
239 239 # elsewhere, so there is no delta to recompute for it here.
240 240 continue
241 241 entry = old_index[rev]
242 242 if entry[ENTRY_DELTA_BASE] not in excluded_revs:
243 243 continue
244 244 # This is a revision that uses the censored revision as the base
245 245 # for its delta. We need to compute a new delta for it.
246 246 if entry[ENTRY_DATA_UNCOMPRESSED_LENGTH] == 0:
247 247 # this revision is empty, we can delta against nullrev
248 248 rewritten_entries[rev] = (nullrev, 0, 0, COMP_MODE_PLAIN)
249 249 else:
250 250
251 251 text = revlog.rawdata(rev, _df=dfh)
252 252 info = revlogutils.revisioninfo(
253 253 node=entry[ENTRY_NODE_ID],
254 254 p1=revlog.node(entry[ENTRY_PARENT_1]),
255 255 p2=revlog.node(entry[ENTRY_PARENT_2]),
256 256 btext=[text],
257 257 textlen=len(text),
258 258 cachedelta=None,
259 259 flags=entry[ENTRY_DATA_OFFSET] & 0xFFFF,
260 260 )
261 261 d = dc.finddeltainfo(
262 262 info, dfh, excluded_bases=excluded_revs, target_rev=rev
263 263 )
264 264 default_comp = revlog._docket.default_compression_header
265 265 comp_mode, d = deltas.delta_compression(default_comp, d)
266 266 # using `tell` is a bit lazy, but we are not here for speed
267 267 start = tmp_storage.tell()
268 268 tmp_storage.write(d.data[1])
269 269 end = tmp_storage.tell()
270 270 rewritten_entries[rev] = (d.base, start, end, comp_mode)
271 271 return rewritten_entries
272 272
273 273
274 274 def _setup_new_files(
275 275 revlog,
276 276 index_cutoff,
277 277 data_cutoff,
278 278 sidedata_cutoff,
279 279 ):
280 280 """
281 281 Return a context manager that opens all the relevant files:
283 283 - old_data_file,
284 284 - old_sidedata_file,
285 285 - new_index_file,
286 286 - new_data_file,
287 287 - new_sidedata_file,
288 288
289 289 The old index file is not included because its content is accessed
290 290 through the `old_index` object of the calling function.
291 291 """
292 292 docket = revlog._docket
293 293 old_index_filepath = revlog.opener.join(docket.index_filepath())
294 294 old_data_filepath = revlog.opener.join(docket.data_filepath())
295 295 old_sidedata_filepath = revlog.opener.join(docket.sidedata_filepath())
296 296
297 297 new_index_filepath = revlog.opener.join(docket.new_index_file())
298 298 new_data_filepath = revlog.opener.join(docket.new_data_file())
299 299 new_sidedata_filepath = revlog.opener.join(docket.new_sidedata_file())
300 300
301 301 util.copyfile(old_index_filepath, new_index_filepath, nb_bytes=index_cutoff)
302 302 util.copyfile(old_data_filepath, new_data_filepath, nb_bytes=data_cutoff)
303 303 util.copyfile(
304 304 old_sidedata_filepath,
305 305 new_sidedata_filepath,
306 306 nb_bytes=sidedata_cutoff,
307 307 )
308 308 revlog.opener.register_file(docket.index_filepath())
309 309 revlog.opener.register_file(docket.data_filepath())
310 310 revlog.opener.register_file(docket.sidedata_filepath())
311 311
312 312 docket.index_end = index_cutoff
313 313 docket.data_end = data_cutoff
314 314 docket.sidedata_end = sidedata_cutoff
315 315
316 316 # reload the revlog internal information
317 317 revlog.clearcaches()
318 318 revlog._loadindex(docket=docket)
319 319
320 320 @contextlib.contextmanager
321 321 def all_files_opener():
322 322 # hide the opening in a helper function to please check-code, black
323 323 # and various Python versions at the same time
324 324 with open(old_data_filepath, 'rb') as old_data_file:
325 325 with open(old_sidedata_filepath, 'rb') as old_sidedata_file:
326 326 with open(new_index_filepath, 'r+b') as new_index_file:
327 327 with open(new_data_filepath, 'r+b') as new_data_file:
328 328 with open(
329 329 new_sidedata_filepath, 'r+b'
330 330 ) as new_sidedata_file:
331 331 new_index_file.seek(0, os.SEEK_END)
332 332 assert new_index_file.tell() == index_cutoff
333 333 new_data_file.seek(0, os.SEEK_END)
334 334 assert new_data_file.tell() == data_cutoff
335 335 new_sidedata_file.seek(0, os.SEEK_END)
336 336 assert new_sidedata_file.tell() == sidedata_cutoff
337 337 yield (
338 338 old_data_file,
339 339 old_sidedata_file,
340 340 new_index_file,
341 341 new_data_file,
342 342 new_sidedata_file,
343 343 )
344 344
345 345 return all_files_opener
346 346
347 347
348 348 def _rewrite_simple(
349 349 revlog,
350 350 old_index,
351 351 all_files,
352 352 rev,
353 353 rewritten_entries,
354 354 tmp_storage,
355 355 ):
356 356 """append a normal revision to the index after the rewritten one(s)"""
357 357 (
358 358 old_data_file,
359 359 old_sidedata_file,
360 360 new_index_file,
361 361 new_data_file,
362 362 new_sidedata_file,
363 363 ) = all_files
364 364 entry = old_index[rev]
365 365 flags = entry[ENTRY_DATA_OFFSET] & 0xFFFF
366 366 old_data_offset = entry[ENTRY_DATA_OFFSET] >> 16
367 367
368 368 if rev not in rewritten_entries:
369 369 old_data_file.seek(old_data_offset)
370 370 new_data_size = entry[ENTRY_DATA_COMPRESSED_LENGTH]
371 371 new_data = old_data_file.read(new_data_size)
372 372 data_delta_base = entry[ENTRY_DELTA_BASE]
373 373 d_comp_mode = entry[ENTRY_DATA_COMPRESSION_MODE]
374 374 else:
375 375 (
376 376 data_delta_base,
377 377 start,
378 378 end,
379 379 d_comp_mode,
380 380 ) = rewritten_entries[rev]
381 381 new_data_size = end - start
382 382 tmp_storage.seek(start)
383 383 new_data = tmp_storage.read(new_data_size)
384 384
385 385 # It might be faster to group contiguous read/write operations;
386 386 # however, this is censoring, an operation that is not focused
387 387 # on stellar performance, so this optimisation has not been
388 388 # written yet.
389 389 new_data_offset = new_data_file.tell()
390 390 new_data_file.write(new_data)
391 391
392 392 sidedata_size = entry[ENTRY_SIDEDATA_COMPRESSED_LENGTH]
393 393 new_sidedata_offset = new_sidedata_file.tell()
394 394 if 0 < sidedata_size:
395 395 old_sidedata_offset = entry[ENTRY_SIDEDATA_OFFSET]
396 396 old_sidedata_file.seek(old_sidedata_offset)
397 397 new_sidedata = old_sidedata_file.read(sidedata_size)
398 398 new_sidedata_file.write(new_sidedata)
399 399
400 400 data_uncompressed_length = entry[ENTRY_DATA_UNCOMPRESSED_LENGTH]
401 401 sd_com_mode = entry[ENTRY_SIDEDATA_COMPRESSION_MODE]
402 402 assert data_delta_base <= rev, (data_delta_base, rev)
403 403
404 404 new_entry = revlogutils.entry(
405 405 flags=flags,
406 406 data_offset=new_data_offset,
407 407 data_compressed_length=new_data_size,
408 408 data_uncompressed_length=data_uncompressed_length,
409 409 data_delta_base=data_delta_base,
410 410 link_rev=entry[ENTRY_LINK_REV],
411 411 parent_rev_1=entry[ENTRY_PARENT_1],
412 412 parent_rev_2=entry[ENTRY_PARENT_2],
413 413 node_id=entry[ENTRY_NODE_ID],
414 414 sidedata_offset=new_sidedata_offset,
415 415 sidedata_compressed_length=sidedata_size,
416 416 data_compression_mode=d_comp_mode,
417 417 sidedata_compression_mode=sd_com_mode,
418 418 )
419 419 revlog.index.append(new_entry)
420 420 entry_bin = revlog.index.entry_binary(rev)
421 421 new_index_file.write(entry_bin)
422 422
423 423 revlog._docket.index_end = new_index_file.tell()
424 424 revlog._docket.data_end = new_data_file.tell()
425 425 revlog._docket.sidedata_end = new_sidedata_file.tell()
426 426
427 427
428 428 def _rewrite_censor(
429 429 revlog,
430 430 old_index,
431 431 all_files,
432 432 rev,
433 433 tombstone,
434 434 ):
435 435 """rewrite and append a censored revision"""
436 436 (
437 437 old_data_file,
438 438 old_sidedata_file,
439 439 new_index_file,
440 440 new_data_file,
441 441 new_sidedata_file,
442 442 ) = all_files
443 443 entry = old_index[rev]
444 444
445 445 # XXX consider trying the default compression too
446 446 new_data_size = len(tombstone)
447 447 new_data_offset = new_data_file.tell()
448 448 new_data_file.write(tombstone)
449 449
450 450 # we are not adding any sidedata as they might leak info about the censored version
451 451
452 452 link_rev = entry[ENTRY_LINK_REV]
453 453
454 454 p1 = entry[ENTRY_PARENT_1]
455 455 p2 = entry[ENTRY_PARENT_2]
456 456
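# using the revision itself as the delta base marks the tombstone as a
# full snapshot rather than a delta against another revision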
457 457 new_entry = revlogutils.entry(
458 458 flags=constants.REVIDX_ISCENSORED,
459 459 data_offset=new_data_offset,
460 460 data_compressed_length=new_data_size,
461 461 data_uncompressed_length=new_data_size,
462 462 data_delta_base=rev,
463 463 link_rev=link_rev,
464 464 parent_rev_1=p1,
465 465 parent_rev_2=p2,
466 466 node_id=entry[ENTRY_NODE_ID],
467 467 sidedata_offset=0,
468 468 sidedata_compressed_length=0,
469 469 data_compression_mode=COMP_MODE_PLAIN,
470 470 sidedata_compression_mode=COMP_MODE_PLAIN,
471 471 )
472 472 revlog.index.append(new_entry)
473 473 entry_bin = revlog.index.entry_binary(rev)
474 474 new_index_file.write(entry_bin)
475 475 revlog._docket.index_end = new_index_file.tell()
476 476 revlog._docket.data_end = new_data_file.tell()
477 477
478 478
479 479 def _get_filename_from_filelog_index(path):
480 480 # Drop the extension and the `data/` prefix
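# e.g. b'data/foo/bar.txt.i' -> b'foo/bar.txt'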
481 481 path_part = path.rsplit(b'.', 1)[0].split(b'/', 1)
482 482 if len(path_part) < 2:
483 483 msg = _(b"cannot recognize filelog from filename: '%s'")
484 484 msg %= path
485 485 raise error.Abort(msg)
486 486
487 487 return path_part[1]
488 488
489 489
490 490 def _filelog_from_filename(repo, path):
491 491 """Returns the filelog for the given `path`. Stolen from `engine.py`"""
492 492
493 493 from .. import filelog # avoid cycle
494 494
495 495 fl = filelog.filelog(repo.svfs, path)
496 496 return fl
497 497
498 498
499 499 def _write_swapped_parents(repo, rl, rev, offset, fp):
500 500 """Swaps p1 and p2 and overwrites the revlog entry for `rev` in `fp`"""
501 501 from ..pure import parsers # avoid cycle
502 502
503 503 if repo._currentlock(repo._lockref) is None:
504 504 # Let's be paranoid about it
505 505 msg = "repo needs to be locked to rewrite parents"
506 506 raise error.ProgrammingError(msg)
507 507
508 508 index_format = parsers.IndexObject.index_format
509 509 entry = rl.index[rev]
510 510 new_entry = list(entry)
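# in a v1 index entry, fields 5 and 6 hold the p1 and p2 revision
# numbers; swapping them is all the repair needs to do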
511 511 new_entry[5], new_entry[6] = entry[6], entry[5]
512 512 packed = index_format.pack(*new_entry[:8])
513 513 fp.seek(offset)
514 514 fp.write(packed)
515 515
516 516
517 517 def _reorder_filelog_parents(repo, fl, to_fix):
518 518 """
519 519 Swaps p1 and p2 for all `to_fix` revisions of filelog `fl` and writes the
520 520 new version to disk, overwriting the old one with a rename.
521 521 """
522 522 from ..pure import parsers # avoid cycle
523 523
524 524 ui = repo.ui
525 525 assert len(to_fix) > 0
526 526 rl = fl._revlog
527 527 if rl._format_version != constants.REVLOGV1:
528 528 msg = "expected version 1 revlog, got version '%d'" % rl._format_version
529 529 raise error.ProgrammingError(msg)
530 530
531 531 index_file = rl._indexfile
532 532 new_file_path = index_file + b'.tmp-parents-fix'
533 533 repaired_msg = _(b"repaired revision %d of 'filelog %s'\n")
534 534
535 535 with ui.uninterruptible():
536 536 try:
537 537 util.copyfile(
538 538 rl.opener.join(index_file),
539 539 rl.opener.join(new_file_path),
540 540 checkambig=rl._checkambig,
541 541 )
542 542
543 543 with rl.opener(new_file_path, mode=b"r+") as fp:
544 544 if rl._inline:
545 545 index = parsers.InlinedIndexObject(fp.read())
546 546 for rev in fl.revs():
547 547 if rev in to_fix:
548 548 offset = index._calculate_index(rev)
549 549 _write_swapped_parents(repo, rl, rev, offset, fp)
550 550 ui.write(repaired_msg % (rev, index_file))
551 551 else:
552 552 index_format = parsers.IndexObject.index_format
553 553 for rev in to_fix:
554 554 offset = rev * index_format.size
555 555 _write_swapped_parents(repo, rl, rev, offset, fp)
556 556 ui.write(repaired_msg % (rev, index_file))
557 557
558 558 rl.opener.rename(new_file_path, index_file)
559 559 rl.clearcaches()
560 560 rl._loadindex()
561 561 finally:
562 562 util.tryunlink(new_file_path)
563 563
564 564
565 565 def _is_revision_affected(fl, filerev, metadata_cache=None):
566 566 """Mercurial currently (5.9rc0) uses `p1 == nullrev and p2 != nullrev` as a
567 567 special meaning compared to the reverse in the context of filelog-based
568 568 copytracing. issue6528 exists because new code assumed that parent ordering
569 569 didn't matter, so this detects if the revision contains metadata (since
570 570 it's only used for filelog-based copytracing) and its parents are in the
571 571 "wrong" order."""
572 572 try:
573 573 raw_text = fl.rawdata(filerev)
574 574 except error.CensoredNodeError:
575 575 # We don't care about censored nodes as they never carry metadata
576 576 return False
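# filelog metadata sits between two b'\x01\n' markers at the start of
# the raw text, e.g. (illustrative):
#   b'\x01\ncopy: a.txt\ncopyrev: <hex nodeid>\n\x01\n<file content>'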
577 577 has_meta = raw_text.startswith(b'\x01\n')
578 578 if metadata_cache is not None:
579 579 metadata_cache[filerev] = has_meta
580 580 if has_meta:
581 581 (p1, p2) = fl.parentrevs(filerev)
582 582 if p1 != nullrev and p2 == nullrev:
583 583 return True
584 584 return False
585 585
586 586
587 587 def _is_revision_affected_fast(repo, fl, filerev, metadata_cache):
588 588 """Optimization fast-path for `_is_revision_affected`.
589 589
590 590 `metadata_cache` is a dict of `{rev: has_metadata}` which allows any
591 591 revision to check whether its delta base has metadata by looking only
592 592 at the current delta, saving computation of the full text.
593 593
594 594 This optimization only works if the revisions are looked at in order."""
595 595 rl = fl._revlog
596 596
597 597 if rl.iscensored(filerev):
598 598 # Censored revisions don't contain metadata, so they cannot be affected
599 599 metadata_cache[filerev] = False
600 600 return False
601 601
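# only revisions with metadata and parents in the "wrong" order
# (p1 != nullrev, p2 == nullrev) can be affected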
602 602 p1, p2 = rl.parentrevs(filerev)
603 603 if p1 == nullrev or p2 != nullrev:
604 604 return False
605 605
606 606 delta_parent = rl.deltaparent(filerev)
607 607 parent_has_metadata = metadata_cache.get(delta_parent)
608 608 if parent_has_metadata is None:
609 609 is_affected = _is_revision_affected(fl, filerev, metadata_cache)
610 610 return is_affected
611 611
612 612 chunk = rl._chunk(filerev)
613 613 if not len(chunk):
614 614 # No diff for this revision
615 615 return parent_has_metadata
616 616
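# a bdiff/mpatch patch is a series of hunks, each starting with three
# big-endian 32-bit integers (start, end, length) describing the
# replaced range, followed by `length` bytes of replacement data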
617 617 header_length = 12
618 618 if len(chunk) < header_length:
619 619 raise error.Abort(_(b"patch cannot be decoded"))
620 620
621 621 start, _end, _length = struct.unpack(b">lll", chunk[:header_length])
622 622
623 623 if start < 2: # len(b'\x01\n') == 2
624 624 # This delta does *something* to the metadata marker (if any).
625 625 # Check it the slow way
626 626 is_affected = _is_revision_affected(fl, filerev, metadata_cache)
627 627 return is_affected
628 628
629 629 # The diff did not remove or add the metadata header, it's then in the same
630 630 # situation as its parent
631 631 metadata_cache[filerev] = parent_has_metadata
632 632 return parent_has_metadata
633 633
634 634
635 635 def _from_report(ui, repo, context, from_report, dry_run):
636 636 """
637 637 Fix the revisions given in the `from_report` file, but still check that
638 638 the revisions are indeed affected, to prevent the unfortunate cyclic
639 639 situation where we would swap well-ordered parents again.
640 640 
641 641 See the doc for `debug_repair_issue6528` for the format documentation.
642 642 """
643 643 ui.write(_(b"loading report file '%s'\n") % from_report)
644 644
645 645 with context(), open(from_report, mode='rb') as f:
646 646 for line in f.read().split(b'\n'):
647 647 if not line:
648 648 continue
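# each report line is b'<hexnode>[,<hexnode>...] <filelog path>',
# matching the writer in `repair_issue6528` below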
649 649 filenodes, filename = line.split(b' ', 1)
650 650 fl = _filelog_from_filename(repo, filename)
651 651 to_fix = set(
652 652 fl.rev(binascii.unhexlify(n)) for n in filenodes.split(b',')
653 653 )
654 654 excluded = set()
655 655
656 656 for filerev in to_fix:
657 657 if _is_revision_affected(fl, filerev):
658 658 msg = b"found affected revision %d for filelog '%s'\n"
659 659 ui.warn(msg % (filerev, filename))
660 660 else:
661 661 msg = _(b"revision %s of file '%s' is not affected\n")
662 662 msg %= (binascii.hexlify(fl.node(filerev)), filename)
663 663 ui.warn(msg)
664 664 excluded.add(filerev)
665 665
666 666 to_fix = to_fix - excluded
667 667 if not to_fix:
668 668 msg = _(b"no affected revisions were found for '%s'\n")
669 669 ui.write(msg % filename)
670 670 continue
671 671 if not dry_run:
672 672 _reorder_filelog_parents(repo, fl, sorted(to_fix))
673 673
674 674
675 def repair_issue6528(ui, repo, dry_run=False, to_report=None, from_report=None):
675 def repair_issue6528(
676 ui, repo, dry_run=False, to_report=None, from_report=None, paranoid=False
677 ):
676 678 from .. import store # avoid cycle
677 679
678 680 @contextlib.contextmanager
679 681 def context():
680 682 if dry_run or to_report: # No need for locking
681 683 yield
682 684 else:
683 685 with repo.wlock(), repo.lock():
684 686 yield
685 687
686 688 if from_report:
687 689 return _from_report(ui, repo, context, from_report, dry_run)
688 690
689 691 report_entries = []
690 692
691 693 with context():
692 694 files = list(
693 695 (file_type, path)
694 696 for (file_type, path, _e, _s) in repo.store.datafiles()
695 697 if path.endswith(b'.i') and file_type & store.FILEFLAGS_FILELOG
696 698 )
697 699
698 700 progress = ui.makeprogress(
699 701 _(b"looking for affected revisions"),
700 702 unit=_(b"filelogs"),
701 703 total=len(files),
702 704 )
703 705 found_nothing = True
704 706
705 707 for file_type, path in files:
706 708 if (
707 709 not path.endswith(b'.i')
708 710 or not file_type & store.FILEFLAGS_FILELOG
709 711 ):
710 712 continue
711 713 progress.increment()
712 714 filename = _get_filename_from_filelog_index(path)
713 715 fl = _filelog_from_filename(repo, filename)
714 716
715 717 # Set of filerevs (or hex filenodes if `to_report`) that need fixing
716 718 to_fix = set()
717 719 metadata_cache = {}
718 720 for filerev in fl.revs():
719 721 affected = _is_revision_affected_fast(
720 722 repo, fl, filerev, metadata_cache
721 723 )
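# with --paranoid, cross-check the fast path against the
# unconditional slow detection and abort on any divergence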
724 if paranoid:
725 slow = _is_revision_affected(fl, filerev)
726 if slow != affected:
727 msg = _(b"paranoid check failed for '%s' at node %s")
728 node = binascii.hexlify(fl.node(filerev))
729 raise error.Abort(msg % (filename, node))
722 730 if affected:
723 731 msg = b"found affected revision %d for filelog '%s'\n"
724 732 ui.warn(msg % (filerev, path))
725 733 found_nothing = False
726 734 if not dry_run:
727 735 if to_report:
728 736 to_fix.add(binascii.hexlify(fl.node(filerev)))
729 737 else:
730 738 to_fix.add(filerev)
731 739
732 740 if to_fix:
733 741 to_fix = sorted(to_fix)
734 742 if to_report:
735 743 report_entries.append((filename, to_fix))
736 744 else:
737 745 _reorder_filelog_parents(repo, fl, to_fix)
738 746
739 747 if found_nothing:
740 748 ui.write(_(b"no affected revisions were found\n"))
741 749
742 750 if to_report and report_entries:
743 751 with open(to_report, mode="wb") as f:
744 752 for path, to_fix in report_entries:
745 753 f.write(b"%s %s\n" % (b",".join(to_fix), path))
746 754
747 755 progress.complete()
@@ -1,447 +1,447
1 1 Show all commands except debug commands
2 2 $ hg debugcomplete
3 3 abort
4 4 add
5 5 addremove
6 6 annotate
7 7 archive
8 8 backout
9 9 bisect
10 10 bookmarks
11 11 branch
12 12 branches
13 13 bundle
14 14 cat
15 15 clone
16 16 commit
17 17 config
18 18 continue
19 19 copy
20 20 diff
21 21 export
22 22 files
23 23 forget
24 24 graft
25 25 grep
26 26 heads
27 27 help
28 28 identify
29 29 import
30 30 incoming
31 31 init
32 32 locate
33 33 log
34 34 manifest
35 35 merge
36 36 outgoing
37 37 parents
38 38 paths
39 39 phase
40 40 pull
41 41 purge
42 42 push
43 43 recover
44 44 remove
45 45 rename
46 46 resolve
47 47 revert
48 48 rollback
49 49 root
50 50 serve
51 51 shelve
52 52 status
53 53 summary
54 54 tag
55 55 tags
56 56 tip
57 57 unbundle
58 58 unshelve
59 59 update
60 60 verify
61 61 version
62 62
63 63 Show all commands that start with "a"
64 64 $ hg debugcomplete a
65 65 abort
66 66 add
67 67 addremove
68 68 annotate
69 69 archive
70 70
71 71 Do not show debug commands if there are other candidates
72 72 $ hg debugcomplete d
73 73 diff
74 74
75 75 Show debug commands if there are no other candidates
76 76 $ hg debugcomplete debug
77 77 debug-repair-issue6528
78 78 debugancestor
79 79 debugantivirusrunning
80 80 debugapplystreamclonebundle
81 81 debugbackupbundle
82 82 debugbuilddag
83 83 debugbundle
84 84 debugcapabilities
85 85 debugchangedfiles
86 86 debugcheckstate
87 87 debugcolor
88 88 debugcommands
89 89 debugcomplete
90 90 debugconfig
91 91 debugcreatestreamclonebundle
92 92 debugdag
93 93 debugdata
94 94 debugdate
95 95 debugdeltachain
96 96 debugdirstate
97 97 debugdirstateignorepatternshash
98 98 debugdiscovery
99 99 debugdownload
100 100 debugextensions
101 101 debugfileset
102 102 debugformat
103 103 debugfsinfo
104 104 debuggetbundle
105 105 debugignore
106 106 debugindex
107 107 debugindexdot
108 108 debugindexstats
109 109 debuginstall
110 110 debugknown
111 111 debuglabelcomplete
112 112 debuglocks
113 113 debugmanifestfulltextcache
114 114 debugmergestate
115 115 debugnamecomplete
116 116 debugnodemap
117 117 debugobsolete
118 118 debugp1copies
119 119 debugp2copies
120 120 debugpathcomplete
121 121 debugpathcopies
122 122 debugpeer
123 123 debugpickmergetool
124 124 debugpushkey
125 125 debugpvec
126 126 debugrebuilddirstate
127 127 debugrebuildfncache
128 128 debugrename
129 129 debugrequires
130 130 debugrevlog
131 131 debugrevlogindex
132 132 debugrevspec
133 133 debugserve
134 134 debugsetparents
135 135 debugshell
136 136 debugsidedata
137 137 debugssl
138 138 debugstrip
139 139 debugsub
140 140 debugsuccessorssets
141 141 debugtagscache
142 142 debugtemplate
143 143 debuguigetpass
144 144 debuguiprompt
145 145 debugupdatecaches
146 146 debugupgraderepo
147 147 debugwalk
148 148 debugwhyunstable
149 149 debugwireargs
150 150 debugwireproto
151 151
152 152 Do not show the alias of a debug command if there are other candidates
153 153 (this should hide rawcommit)
154 154 $ hg debugcomplete r
155 155 recover
156 156 remove
157 157 rename
158 158 resolve
159 159 revert
160 160 rollback
161 161 root
162 162 Show the alias of a debug command if there are no other candidates
163 163 $ hg debugcomplete rawc
164 164
165 165
166 166 Show the global options
167 167 $ hg debugcomplete --options | sort
168 168 --color
169 169 --config
170 170 --cwd
171 171 --debug
172 172 --debugger
173 173 --encoding
174 174 --encodingmode
175 175 --help
176 176 --hidden
177 177 --noninteractive
178 178 --pager
179 179 --profile
180 180 --quiet
181 181 --repository
182 182 --time
183 183 --traceback
184 184 --verbose
185 185 --version
186 186 -R
187 187 -h
188 188 -q
189 189 -v
190 190 -y
191 191
192 192 Show the options for the "serve" command
193 193 $ hg debugcomplete --options serve | sort
194 194 --accesslog
195 195 --address
196 196 --certificate
197 197 --cmdserver
198 198 --color
199 199 --config
200 200 --cwd
201 201 --daemon
202 202 --daemon-postexec
203 203 --debug
204 204 --debugger
205 205 --encoding
206 206 --encodingmode
207 207 --errorlog
208 208 --help
209 209 --hidden
210 210 --ipv6
211 211 --name
212 212 --noninteractive
213 213 --pager
214 214 --pid-file
215 215 --port
216 216 --prefix
217 217 --print-url
218 218 --profile
219 219 --quiet
220 220 --repository
221 221 --stdio
222 222 --style
223 223 --subrepos
224 224 --templates
225 225 --time
226 226 --traceback
227 227 --verbose
228 228 --version
229 229 --web-conf
230 230 -6
231 231 -A
232 232 -E
233 233 -R
234 234 -S
235 235 -a
236 236 -d
237 237 -h
238 238 -n
239 239 -p
240 240 -q
241 241 -t
242 242 -v
243 243 -y
244 244
245 245 Show an error if we use --options with an ambiguous abbreviation
246 246 $ hg debugcomplete --options s
247 247 hg: command 's' is ambiguous:
248 248 serve shelve showconfig status summary
249 249 [10]
250 250
251 251 Show all commands + options
252 252 $ hg debugcommands
253 253 abort: dry-run
254 254 add: include, exclude, subrepos, dry-run
255 255 addremove: similarity, subrepos, include, exclude, dry-run
256 256 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
257 257 archive: no-decode, prefix, rev, type, subrepos, include, exclude
258 258 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
259 259 bisect: reset, good, bad, skip, extend, command, noupdate
260 260 bookmarks: force, rev, delete, rename, inactive, list, template
261 261 branch: force, clean, rev
262 262 branches: active, closed, rev, template
263 263 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
264 264 cat: output, rev, decode, include, exclude, template
265 265 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
266 266 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
267 267 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
268 268 continue: dry-run
269 269 copy: forget, after, at-rev, force, include, exclude, dry-run
270 debug-repair-issue6528: to-report, from-report, dry-run
270 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
271 271 debugancestor:
272 272 debugantivirusrunning:
273 273 debugapplystreamclonebundle:
274 274 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
275 275 debugbuilddag: mergeable-file, overwritten-file, new-file
276 276 debugbundle: all, part-type, spec
277 277 debugcapabilities:
278 278 debugchangedfiles: compute
279 279 debugcheckstate:
280 280 debugcolor: style
281 281 debugcommands:
282 282 debugcomplete: options
283 283 debugcreatestreamclonebundle:
284 284 debugdag: tags, branches, dots, spaces
285 285 debugdata: changelog, manifest, dir
286 286 debugdate: extended
287 287 debugdeltachain: changelog, manifest, dir, template
288 288 debugdirstateignorepatternshash:
289 289 debugdirstate: nodates, dates, datesort, all
290 290 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
291 291 debugdownload: output
292 292 debugextensions: template
293 293 debugfileset: rev, all-files, show-matcher, show-stage
294 294 debugformat: template
295 295 debugfsinfo:
296 296 debuggetbundle: head, common, type
297 297 debugignore:
298 298 debugindex: changelog, manifest, dir, template
299 299 debugindexdot: changelog, manifest, dir
300 300 debugindexstats:
301 301 debuginstall: template
302 302 debugknown:
303 303 debuglabelcomplete:
304 304 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
305 305 debugmanifestfulltextcache: clear, add
306 306 debugmergestate: style, template
307 307 debugnamecomplete:
308 308 debugnodemap: dump-new, dump-disk, check, metadata
309 309 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
310 310 debugp1copies: rev
311 311 debugp2copies: rev
312 312 debugpathcomplete: full, normal, added, removed
313 313 debugpathcopies: include, exclude
314 314 debugpeer:
315 315 debugpickmergetool: rev, changedelete, include, exclude, tool
316 316 debugpushkey:
317 317 debugpvec:
318 318 debugrebuilddirstate: rev, minimal
319 319 debugrebuildfncache:
320 320 debugrename: rev
321 321 debugrequires:
322 322 debugrevlog: changelog, manifest, dir, dump
323 323 debugrevlogindex: changelog, manifest, dir, format
324 324 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
325 325 debugserve: sshstdio, logiofd, logiofile
326 326 debugsetparents:
327 327 debugshell:
328 328 debugsidedata: changelog, manifest, dir
329 329 debugssl:
330 330 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
331 331 debugsub: rev
332 332 debugsuccessorssets: closest
333 333 debugtagscache:
334 334 debugtemplate: rev, define
335 335 debuguigetpass: prompt
336 336 debuguiprompt: prompt
337 337 debugupdatecaches:
338 338 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
339 339 debugwalk: include, exclude
340 340 debugwhyunstable:
341 341 debugwireargs: three, four, five, ssh, remotecmd, insecure
342 342 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
343 343 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
344 344 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
345 345 files: rev, print0, include, exclude, template, subrepos
346 346 forget: interactive, include, exclude, dry-run
347 347 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
348 348 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
349 349 heads: rev, topo, active, closed, style, template
350 350 help: extension, command, keyword, system
351 351 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
352 352 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
353 353 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
354 354 init: ssh, remotecmd, insecure
355 355 locate: rev, print0, fullpath, include, exclude
356 356 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
357 357 manifest: rev, all, template
358 358 merge: force, rev, preview, abort, tool
359 359 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
360 360 parents: rev, style, template
361 361 paths: template
362 362 phase: public, draft, secret, force, rev
363 363 pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
364 364 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
365 365 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
366 366 recover: verify
367 367 remove: after, force, subrepos, include, exclude, dry-run
368 368 rename: forget, after, at-rev, force, include, exclude, dry-run
369 369 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
370 370 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
371 371 rollback: dry-run, force
372 372 root: template
373 373 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
374 374 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
375 375 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
376 376 summary: remote
377 377 tag: force, local, rev, remove, edit, message, date, user
378 378 tags: template
379 379 tip: patch, git, style, template
380 380 unbundle: update
381 381 unshelve: abort, continue, interactive, keep, name, tool, date
382 382 update: clean, check, merge, date, rev, tool
383 383 verify: full
384 384 version: template
385 385
386 386 $ hg init a
387 387 $ cd a
388 388 $ echo fee > fee
389 389 $ hg ci -q -Amfee
390 390 $ hg tag fee
391 391 $ mkdir fie
392 392 $ echo dead > fie/dead
393 393 $ echo live > fie/live
394 394 $ hg bookmark fo
395 395 $ hg branch -q fie
396 396 $ hg ci -q -Amfie
397 397 $ echo fo > fo
398 398 $ hg branch -qf default
399 399 $ hg ci -q -Amfo
400 400 $ echo Fum > Fum
401 401 $ hg ci -q -AmFum
402 402 $ hg bookmark Fum
403 403
404 404 Test debugpathcomplete
405 405
406 406 $ hg debugpathcomplete f
407 407 fee
408 408 fie
409 409 fo
410 410 $ hg debugpathcomplete -f f
411 411 fee
412 412 fie/dead
413 413 fie/live
414 414 fo
415 415
416 416 $ hg rm Fum
417 417 $ hg debugpathcomplete -r F
418 418 Fum
419 419
420 420 Test debugnamecomplete
421 421
422 422 $ hg debugnamecomplete
423 423 Fum
424 424 default
425 425 fee
426 426 fie
427 427 fo
428 428 tip
429 429 $ hg debugnamecomplete f
430 430 fee
431 431 fie
432 432 fo
433 433
434 434 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
435 435 used for completions in some shells.
436 436
437 437 $ hg debuglabelcomplete
438 438 Fum
439 439 default
440 440 fee
441 441 fie
442 442 fo
443 443 tip
444 444 $ hg debuglabelcomplete f
445 445 fee
446 446 fie
447 447 fo
@@ -1,414 +1,433
1 1 ===============================================================
2 2 Test non-regression on the corruption associated with issue6528
3 3 ===============================================================
4 4
5 5 Setup
6 6 =====
7 7
8 8 $ hg init base-repo
9 9 $ cd base-repo
10 10
11 11 $ cat <<EOF > a.txt
12 12 > 1
13 13 > 2
14 14 > 3
15 15 > 4
16 16 > 5
17 17 > 6
18 18 > EOF
19 19
20 20 $ hg add a.txt
21 21 $ hg commit -m 'c_base_c - create a.txt'
22 22
23 23 Modify a.txt
24 24
25 25 $ sed -e 's/1/foo/' a.txt > a.tmp; mv a.tmp a.txt
26 26 $ hg commit -m 'c_modify_c - modify a.txt'
27 27
28 28 Modify and rename a.txt to b.txt
29 29
30 30 $ hg up -r "desc('c_base_c')"
31 31 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
32 32 $ sed -e 's/6/bar/' a.txt > a.tmp; mv a.tmp a.txt
33 33 $ hg mv a.txt b.txt
34 34 $ hg commit -m 'c_rename_c - rename and modify a.txt to b.txt'
35 35 created new head
36 36
37 37 Merge each branch
38 38
39 39 $ hg merge -r "desc('c_modify_c')"
40 40 merging b.txt and a.txt to b.txt
41 41 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
42 42 (branch merge, don't forget to commit)
43 43 $ hg commit -m 'c_merge_c: commit merge'
44 44
45 45 $ hg debugrevlogindex b.txt
46 46 rev linkrev nodeid p1 p2
47 47 0 2 05b806ebe5ea 000000000000 000000000000
48 48 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
49 49
50 50 Check commit Graph
51 51
52 52 $ hg log -G
53 53 @ changeset: 3:a1cc2bdca0aa
54 54 |\ tag: tip
55 55 | | parent: 2:615c6ccefd15
56 56 | | parent: 1:373d507f4667
57 57 | | user: test
58 58 | | date: Thu Jan 01 00:00:00 1970 +0000
59 59 | | summary: c_merge_c: commit merge
60 60 | |
61 61 | o changeset: 2:615c6ccefd15
62 62 | | parent: 0:f5a5a568022f
63 63 | | user: test
64 64 | | date: Thu Jan 01 00:00:00 1970 +0000
65 65 | | summary: c_rename_c - rename and modify a.txt to b.txt
66 66 | |
67 67 o | changeset: 1:373d507f4667
68 68 |/ user: test
69 69 | date: Thu Jan 01 00:00:00 1970 +0000
70 70 | summary: c_modify_c - modify a.txt
71 71 |
72 72 o changeset: 0:f5a5a568022f
73 73 user: test
74 74 date: Thu Jan 01 00:00:00 1970 +0000
75 75 summary: c_base_c - create a.txt
76 76
77 77
78 78 $ hg cat -r . b.txt
79 79 foo
80 80 2
81 81 3
82 82 4
83 83 5
84 84 bar
85 85 $ cat b.txt
86 86 foo
87 87 2
88 88 3
89 89 4
90 90 5
91 91 bar
92 92 $ cd ..
93 93
94 94
95 95 Check the lack of corruption
96 96 ============================
97 97
98 98 $ hg clone --pull base-repo cloned
99 99 requesting all changes
100 100 adding changesets
101 101 adding manifests
102 102 adding file changes
103 103 added 4 changesets with 4 changes to 2 files
104 104 new changesets f5a5a568022f:a1cc2bdca0aa
105 105 updating to branch default
106 106 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
107 107 $ cd cloned
108 108 $ hg up -r "desc('c_merge_c')"
109 109 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
110 110
111 111
112 112 Status is buggy, even with debugrebuilddirstate
113 113
114 114 $ hg cat -r . b.txt
115 115 foo
116 116 2
117 117 3
118 118 4
119 119 5
120 120 bar
121 121 $ cat b.txt
122 122 foo
123 123 2
124 124 3
125 125 4
126 126 5
127 127 bar
128 128 $ hg status
129 129 $ hg debugrebuilddirstate
130 130 $ hg status
131 131
132 132 the history was altered
133 133
134 134 in theory p1/p2 order does not matter but in practice p1 == nullid is used as a
135 135 marker that some metadata are present and should be fetched.
136 136
137 137 $ hg debugrevlogindex b.txt
138 138 rev linkrev nodeid p1 p2
139 139 0 2 05b806ebe5ea 000000000000 000000000000
140 140 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
141 141
142 142 Check commit Graph
143 143
144 144 $ hg log -G
145 145 @ changeset: 3:a1cc2bdca0aa
146 146 |\ tag: tip
147 147 | | parent: 2:615c6ccefd15
148 148 | | parent: 1:373d507f4667
149 149 | | user: test
150 150 | | date: Thu Jan 01 00:00:00 1970 +0000
151 151 | | summary: c_merge_c: commit merge
152 152 | |
153 153 | o changeset: 2:615c6ccefd15
154 154 | | parent: 0:f5a5a568022f
155 155 | | user: test
156 156 | | date: Thu Jan 01 00:00:00 1970 +0000
157 157 | | summary: c_rename_c - rename and modify a.txt to b.txt
158 158 | |
159 159 o | changeset: 1:373d507f4667
160 160 |/ user: test
161 161 | date: Thu Jan 01 00:00:00 1970 +0000
162 162 | summary: c_modify_c - modify a.txt
163 163 |
164 164 o changeset: 0:f5a5a568022f
165 165 user: test
166 166 date: Thu Jan 01 00:00:00 1970 +0000
167 167 summary: c_base_c - create a.txt
168 168
169 169
170 170 Test the command that fixes the issue
171 171 =====================================
172 172
173 173 Restore a broken repository with multiple broken revisions and a filename that
174 174 would get encoded to test the `report` options.
175 175 It's a tarball because unbundle might magically fix the issue later.
176 176
177 177 $ cd ..
178 178 $ mkdir repo-to-fix
179 179 $ cd repo-to-fix
180 180 #if windows
181 181 tar interprets `:` in paths (like `C:`) as being remote, force local on Windows
182 182 only since some versions of tar don't have this flag.
183 183
184 184 $ tar --force-local -xf $TESTDIR/bundles/issue6528.tar
185 185 #else
186 186 $ tar xf $TESTDIR/bundles/issue6528.tar
187 187 #endif
188 188
189 189 Check that the issue is present
190 190 $ hg st
191 191 M D.txt
192 192 M b.txt
193 193 $ hg debugrevlogindex b.txt
194 194 rev linkrev nodeid p1 p2
195 195 0 2 05b806ebe5ea 000000000000 000000000000
196 196 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
197 197 2 6 216a5fe8b8ed 000000000000 000000000000
198 198 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
199 199 $ hg debugrevlogindex D.txt
200 200 rev linkrev nodeid p1 p2
201 201 0 6 2a8d3833f2fb 000000000000 000000000000
202 202 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
203 203
204 204 Dry-run the fix
205 205 $ hg debug-repair-issue6528 --dry-run
206 206 found affected revision 1 for filelog 'data/D.txt.i'
207 207 found affected revision 1 for filelog 'data/b.txt.i'
208 208 found affected revision 3 for filelog 'data/b.txt.i'
209 209 $ hg st
210 210 M D.txt
211 211 M b.txt
212 212 $ hg debugrevlogindex b.txt
213 213 rev linkrev nodeid p1 p2
214 214 0 2 05b806ebe5ea 000000000000 000000000000
215 215 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
216 216 2 6 216a5fe8b8ed 000000000000 000000000000
217 217 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
218 218 $ hg debugrevlogindex D.txt
219 219 rev linkrev nodeid p1 p2
220 220 0 6 2a8d3833f2fb 000000000000 000000000000
221 221 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
222 222
223 Test the --paranoid option (it re-checks each fast-path result with the slow path)
224 $ hg debug-repair-issue6528 --dry-run --paranoid
225 found affected revision 1 for filelog 'data/D.txt.i'
226 found affected revision 1 for filelog 'data/b.txt.i'
227 found affected revision 3 for filelog 'data/b.txt.i'
228 $ hg st
229 M D.txt
230 M b.txt
231 $ hg debugrevlogindex b.txt
232 rev linkrev nodeid p1 p2
233 0 2 05b806ebe5ea 000000000000 000000000000
234 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
235 2 6 216a5fe8b8ed 000000000000 000000000000
236 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
237 $ hg debugrevlogindex D.txt
238 rev linkrev nodeid p1 p2
239 0 6 2a8d3833f2fb 000000000000 000000000000
240 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
241
223 242 Run the fix
224 243 $ hg debug-repair-issue6528
225 244 found affected revision 1 for filelog 'data/D.txt.i'
226 245 repaired revision 1 of 'filelog data/D.txt.i'
227 246 found affected revision 1 for filelog 'data/b.txt.i'
228 247 found affected revision 3 for filelog 'data/b.txt.i'
229 248 repaired revision 1 of 'filelog data/b.txt.i'
230 249 repaired revision 3 of 'filelog data/b.txt.i'
231 250
232 251 Check that the fix worked and that running it twice does nothing
233 252 $ hg st
234 253 $ hg debugrevlogindex b.txt
235 254 rev linkrev nodeid p1 p2
236 255 0 2 05b806ebe5ea 000000000000 000000000000
237 256 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
238 257 2 6 216a5fe8b8ed 000000000000 000000000000
239 258 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
240 259 $ hg debugrevlogindex D.txt
241 260 rev linkrev nodeid p1 p2
242 261 0 6 2a8d3833f2fb 000000000000 000000000000
243 262 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
244 263 $ hg debug-repair-issue6528
245 264 no affected revisions were found
246 265 $ hg st
247 266 $ hg debugrevlogindex b.txt
248 267 rev linkrev nodeid p1 p2
249 268 0 2 05b806ebe5ea 000000000000 000000000000
250 269 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
251 270 2 6 216a5fe8b8ed 000000000000 000000000000
252 271 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
253 272 $ hg debugrevlogindex D.txt
254 273 rev linkrev nodeid p1 p2
255 274 0 6 2a8d3833f2fb 000000000000 000000000000
256 275 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
257 276
258 277 Try using the report options
259 278 --------------------------------
260 279
261 280 $ cd ..
262 281 $ mkdir repo-to-fix-report
263 282 $ cd repo-to-fix
264 283 #if windows
265 284 tar interprets `:` in paths (like `C:`) as being remote, force local on Windows
266 285 only since some versions of tar don't have this flag.
267 286
268 287 $ tar --force-local -xf $TESTDIR/bundles/issue6528.tar
269 288 #else
270 289 $ tar xf $TESTDIR/bundles/issue6528.tar
271 290 #endif
272 291
273 292 $ hg debug-repair-issue6528 --to-report $TESTTMP/report.txt
274 293 found affected revision 1 for filelog 'data/D.txt.i'
275 294 found affected revision 1 for filelog 'data/b.txt.i'
276 295 found affected revision 3 for filelog 'data/b.txt.i'
277 296 $ cat $TESTTMP/report.txt
278 297 2a80419dfc31d7dfb308ac40f3f138282de7d73b D.txt
279 298 a58b36ad6b6545195952793099613c2116f3563b,ea4f2f2463cca5b29ddf3461012b8ce5c6dac175 b.txt
280 299
281 300 $ hg debug-repair-issue6528 --from-report $TESTTMP/report.txt --dry-run
282 301 loading report file '$TESTTMP/report.txt'
283 302 found affected revision 1 for filelog 'D.txt'
284 303 found affected revision 1 for filelog 'b.txt'
285 304 found affected revision 3 for filelog 'b.txt'
286 305 $ hg st
287 306 M D.txt
288 307 M b.txt
289 308 $ hg debugrevlogindex b.txt
290 309 rev linkrev nodeid p1 p2
291 310 0 2 05b806ebe5ea 000000000000 000000000000
292 311 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
293 312 2 6 216a5fe8b8ed 000000000000 000000000000
294 313 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
295 314 $ hg debugrevlogindex D.txt
296 315 rev linkrev nodeid p1 p2
297 316 0 6 2a8d3833f2fb 000000000000 000000000000
298 317 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
299 318
300 319 $ hg debug-repair-issue6528 --from-report $TESTTMP/report.txt
301 320 loading report file '$TESTTMP/report.txt'
302 321 found affected revision 1 for filelog 'D.txt'
303 322 repaired revision 1 of 'filelog data/D.txt.i'
304 323 found affected revision 1 for filelog 'b.txt'
305 324 found affected revision 3 for filelog 'b.txt'
306 325 repaired revision 1 of 'filelog data/b.txt.i'
307 326 repaired revision 3 of 'filelog data/b.txt.i'
308 327 $ hg st
309 328 $ hg debugrevlogindex b.txt
310 329 rev linkrev nodeid p1 p2
311 330 0 2 05b806ebe5ea 000000000000 000000000000
312 331 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
313 332 2 6 216a5fe8b8ed 000000000000 000000000000
314 333 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
315 334 $ hg debugrevlogindex D.txt
316 335 rev linkrev nodeid p1 p2
317 336 0 6 2a8d3833f2fb 000000000000 000000000000
318 337 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
319 338
320 339 Check that the revision is not "fixed" again
321 340
322 341 $ hg debug-repair-issue6528 --from-report $TESTTMP/report.txt
323 342 loading report file '$TESTTMP/report.txt'
324 343 revision 2a80419dfc31d7dfb308ac40f3f138282de7d73b of file 'D.txt' is not affected
325 344 no affected revisions were found for 'D.txt'
326 345 revision a58b36ad6b6545195952793099613c2116f3563b of file 'b.txt' is not affected
327 346 revision ea4f2f2463cca5b29ddf3461012b8ce5c6dac175 of file 'b.txt' is not affected
328 347 no affected revisions were found for 'b.txt'
329 348 $ hg st
330 349 $ hg debugrevlogindex b.txt
331 350 rev linkrev nodeid p1 p2
332 351 0 2 05b806ebe5ea 000000000000 000000000000
333 352 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
334 353 2 6 216a5fe8b8ed 000000000000 000000000000
335 354 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
336 355 $ hg debugrevlogindex D.txt
337 356 rev linkrev nodeid p1 p2
338 357 0 6 2a8d3833f2fb 000000000000 000000000000
339 358 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
340 359
341 360 Try it with a non-inline revlog
342 361 -------------------------------
343 362
344 363 $ cd ..
345 364 $ mkdir $TESTTMP/ext
346 365 $ cat << EOF > $TESTTMP/ext/small_inline.py
347 366 > from mercurial import revlog
348 367 > revlog._maxinline = 8
349 368 > EOF
350 369
351 370 $ cat << EOF >> $HGRCPATH
352 371 > [extensions]
353 372 > small_inline=$TESTTMP/ext/small_inline.py
354 373 > EOF
355 374
356 375 $ mkdir repo-to-fix-not-inline
357 376 $ cd repo-to-fix-not-inline
358 377 #if windows
359 378 tar interprets `:` in paths (like `C:`) as being remote, force local on Windows
360 379 only since some versions of tar don't have this flag.
361 380
362 381 $ tar --force-local -xf $TESTDIR/bundles/issue6528.tar
363 382 #else
364 383 $ tar xf $TESTDIR/bundles/issue6528.tar
365 384 #endif
366 385 $ echo b >> b.txt
367 386 $ hg commit -qm "inline -> separate"
368 387 $ find .hg -name *b.txt.d
369 388 .hg/store/data/b.txt.d
370 389
371 390 Status is correct, but the problem is still there, in the earlier revision
372 391 $ hg st
373 392 $ hg up 3
374 393 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
375 394 $ hg st
376 395 M b.txt
377 396 $ hg debugrevlogindex b.txt
378 397 rev linkrev nodeid p1 p2
379 398 0 2 05b806ebe5ea 000000000000 000000000000
380 399 1 3 a58b36ad6b65 05b806ebe5ea 000000000000
381 400 2 6 216a5fe8b8ed 000000000000 000000000000
382 401 3 7 ea4f2f2463cc 216a5fe8b8ed 000000000000
383 402 4 8 db234885e2fe ea4f2f2463cc 000000000000
384 403 $ hg debugrevlogindex D.txt
385 404 rev linkrev nodeid p1 p2
386 405 0 6 2a8d3833f2fb 000000000000 000000000000
387 406 1 7 2a80419dfc31 2a8d3833f2fb 000000000000
388 407 2 8 65aecc89bb5d 2a80419dfc31 000000000000
389 408
390 409 Run the fix on the non-inline revlog
391 410 $ hg debug-repair-issue6528
392 411 found affected revision 1 for filelog 'data/D.txt.i'
393 412 repaired revision 1 of 'filelog data/D.txt.i'
394 413 found affected revision 1 for filelog 'data/b.txt.i'
395 414 found affected revision 3 for filelog 'data/b.txt.i'
396 415 repaired revision 1 of 'filelog data/b.txt.i'
397 416 repaired revision 3 of 'filelog data/b.txt.i'
398 417
399 418 Check that it worked
400 419 $ hg debugrevlogindex b.txt
401 420 rev linkrev nodeid p1 p2
402 421 0 2 05b806ebe5ea 000000000000 000000000000
403 422 1 3 a58b36ad6b65 000000000000 05b806ebe5ea
404 423 2 6 216a5fe8b8ed 000000000000 000000000000
405 424 3 7 ea4f2f2463cc 000000000000 216a5fe8b8ed
406 425 4 8 db234885e2fe ea4f2f2463cc 000000000000
407 426 $ hg debugrevlogindex D.txt
408 427 rev linkrev nodeid p1 p2
409 428 0 6 2a8d3833f2fb 000000000000 000000000000
410 429 1 7 2a80419dfc31 000000000000 2a8d3833f2fb
411 430 2 8 65aecc89bb5d 2a80419dfc31 000000000000
412 431 $ hg debug-repair-issue6528
413 432 no affected revisions were found
414 433 $ hg st