##// END OF EJS Templates
debugrebuildfncache: add a cheaper option to rebuild the fncache...
Valentin Gatien-Baron -
r48674:8e4659b5 stable draft
parent child Browse files
Show More
@@ -1,4932 +1,4944 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import binascii
11 11 import codecs
12 12 import collections
13 13 import contextlib
14 14 import difflib
15 15 import errno
16 16 import glob
17 17 import operator
18 18 import os
19 19 import platform
20 20 import random
21 21 import re
22 22 import socket
23 23 import ssl
24 24 import stat
25 25 import string
26 26 import subprocess
27 27 import sys
28 28 import time
29 29
30 30 from .i18n import _
31 31 from .node import (
32 32 bin,
33 33 hex,
34 34 nullrev,
35 35 short,
36 36 )
37 37 from .pycompat import (
38 38 getattr,
39 39 open,
40 40 )
41 41 from . import (
42 42 bundle2,
43 43 bundlerepo,
44 44 changegroup,
45 45 cmdutil,
46 46 color,
47 47 context,
48 48 copies,
49 49 dagparser,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 vfs as vfsmod,
92 92 wireprotoframing,
93 93 wireprotoserver,
94 94 wireprotov2peer,
95 95 )
96 96 from .interfaces import repository
97 97 from .utils import (
98 98 cborutil,
99 99 compression,
100 100 dateutil,
101 101 procutil,
102 102 stringutil,
103 103 urlutil,
104 104 )
105 105
106 106 from .revlogutils import (
107 107 deltas as deltautil,
108 108 nodemap,
109 109 rewrite,
110 110 sidedata,
111 111 )
112 112
113 113 release = lockmod.release
114 114
115 115 table = {}
116 116 table.update(strip.command._table)
117 117 command = registrar.command(table)
118 118
119 119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two calling forms: an explicit revlog index file plus two revisions,
    # or just two revisions resolved against the current repo's changelog.
    if len(args) not in (2, 3):
        raise error.Abort(_(b'either two or three arguments required'))
    if len(args) == 3:
        index, rev1, rev2 = args
        opener = vfsmod.vfs(encoding.getcwd(), audit=False)
        r = revlog.revlog(opener, index)
        lookup = r.lookup
    else:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    ancnode = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(ancnode), hex(ancnode)))
139 139
140 140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes everywhere else in this file; use a bytes literal
    # here too, and name it once so the write and the unlink cannot drift
    # apart.
    eicar_path = b'eicar-test-file.com'
    with repo.cachevfs.open(eicar_path, b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(eicar_path))
156 156
157 157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle, let readbundle sniff its type, and apply it onto
    # the current repository.
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
164 164
165 165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # this command only makes sense against a pristine repository
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass is only used for the progress total and, with
    # --mergeable-file, to size the initial file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # hold both locks and a single transaction for the whole build
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the most recently committed node (-1 = none yet)
        atbranch = b'default'
        nodeids = []  # maps DAG id -> committed node, for backrefs
        id = 0
        progress.update(id)
        # second parse pass: actually commit one changeset per 'n' event
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the shared file from both
                        # parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this revision's line so every rev changes the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # carry the second parent's nf* files through merges
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve content prepared above
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # translate DAG backrefs into parent nodeids
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for the preceding node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # branch change applies to subsequent commits
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
341 341
342 342
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of a changegroup bundle

    With ``all`` set, print id/parents/cset/delta-base/delta-length for
    every delta in the changelog, the manifest and each filelog; otherwise
    print only the node ids of the changelog deltas.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # dump every delta of the current section under a header line
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iterate filelog sections until the empty-header sentinel
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
382 382
383 383
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # markers encoded with a format we do not know; report and stop
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # render each raw marker through the shared showmarker formatter
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
406 406
407 407
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads decoded from 'data', one head per line"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
416 416
417 417
def _quasirepr(thing):
    """Return a stable, repr-like bytes rendering of *thing*.

    Mapping types are rendered with their keys sorted so the output is
    deterministic; everything else falls back to repr().
    """
    mappingtypes = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mappingtypes):
        return pycompat.bytestr(repr(thing))
    pairs = [b'%s: %s' % (k, thing[k]) for k in sorted(thing)]
    return b'{%s}' % b', '.join(pairs)
424 424
425 425
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only show parts whose type was requested
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # for known part types, additionally decode and dump the payload
        # (unless --quiet)
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
448 448
449 449
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundle specification, not the contents
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        # dispatch on bundle format: bundle2 vs legacy changegroup
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
472 472
473 473
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        # bundle2 capabilities are nested: a key with a list of values
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        # always release the peer connection, even on error
        peer.close()
493 493
494 494
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    files = None

    # either recompute the file changes from the context, or read the
    # pre-computed block from the changelog's sidedata
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # classify each touched file; order matters: the first
            # matching category wins, 'touched' is the fallback
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # annotate copies with which parent the source came from
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
544 544
545 545
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0

    def complain(msg, f, state):
        # report one inconsistency and count it
        ui.warn(msg % (f, state))
        return 1

    # dirstate entries must agree with the parent manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            errors += complain(
                _(b"%s in state %s, but not in manifest1\n"), f, state
            )
        if state in b"a" and f in m1:
            errors += complain(
                _(b"%s in state %s, but also in manifest1\n"), f, state
            )
        if state in b"m" and f not in m1 and f not in m2:
            errors += complain(
                _(b"%s in state %s, but not in either manifest\n"), f, state
            )
    # and everything in manifest1 must be tracked with a sensible state
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            errors += complain(
                _(b"%s in manifest1, but listed as state %s"), f, state
            )
    if errors:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
574 574
575 575
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors/effects
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
588 588
589 589
def _debugdisplaycolor(ui):
    """display all known color/effect names, each rendered in its own style"""
    # work on a copy so we do not clobber the caller's configured styles
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # in terminfo mode, also surface custom color./terminfo. entries
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
606 606
607 607
def _debugdisplaystyle(ui):
    """display every configured style label, colorized with its own effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect lists line up in a column
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            padding = b' ' * (max(0, width - len(label)))
            colorized = [ui.label(e, e) for e in effects.split()]
            ui.write(b': ')
            ui.write(padding)
            ui.write(b', '.join(colorized))
        ui.write(b'\n')
621 621
622 622
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    # stream bundles copy revlogs wholesale, so secret changesets would
    # leak into the bundle; warn rather than abort for now
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    # the consumer needs these repo requirements to be able to apply it
    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
644 644
645 645
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit index file: emit its DAG, labeling any listed revs
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) nodes and ('l', (rev, label)) tags
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # no file: walk the repository changelog instead
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged rev to its list of tag names
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an ('a', branch) event whenever the branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # serialize the event stream back into dagparser text form
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
715 715
716 716
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # with -c/-m/--dir the first positional is actually the revision,
    # and the storage target comes from the flag instead
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # rawdata: undecoded revision content, exactly as stored
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
732 732
733 733
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    # NOTE(review): both norepo=True and optionalrepo=True are passed here;
    # they look mutually exclusive — confirm which one dispatch honors.
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    # d is an (unixtime, tzoffset) pair
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        # also show whether the parsed date falls within RANGE
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
752 752
753 753
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # gather per-revision stats from the raw index entry
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; classify it against the parents
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta the base is either self or the previous rev
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # number chains by first-seen base so chainid is stable and compact
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # guard the ratios against zero denominators
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate the sparse read and measure how much disk data
            # would actually be touched for this chain
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
934 934
935 935
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --dates=no; either suppresses
    # the mtime column
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (
            x[1].v1_mtime(),
            x[0],
        )  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    if opts['all']:
        # include dirstate-v2-only tree nodes via the debug iterator
        entries = list(repo.dirstate._map.debug_iter())
    else:
        entries = list(pycompat.iteritems(repo.dirstate))
    entries.sort(key=keyfunc)
    for file_, ent in entries:
        # v1_mtime() == -1 means the mtime was never recorded
        if ent.v1_mtime() == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent.v1_mtime())
            )
            timestr = encoding.strtolocal(timestr)
        # NOTE(review): this branch reads ent.mode while the else branch
        # uses ent.v1_mode() — presumably equivalent for the symlink bit;
        # confirm against the dirstate item implementation.
        if ent.mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent.v1_mode() & 0o777 & ~util.umask)
        ui.write(
            b"%c %s %10d %s%s\n"
            % (ent.v1_state(), mode, ent.v1_size(), timestr, file_)
        )
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
996 996
997 997
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 stores an ignore-pattern hash (at the tail of the
    # docket's tree metadata); with dirstate-v1 there is nothing to show.
    # (Docstring fixed: it previously said "nothing for dirstate-v2",
    # contradicting the check below.)
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1012 1012
1013 1013
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual peer (possibly the local repo via its peer API)
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # emulate the remote side with a filtered view of the local repo
        branches = (None, [])
        remote_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # restrict the local side to the requested subset
        local_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` collects audit information from the discovery run (including
    # b'total-roundtrips', displayed below).
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # `_any` renamed from `any` to avoid shadowing the builtin
            common, _any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # strict formatters (e.g. json) must capture stray output as data

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1267 1267
1268 1268
# 4 KiB buffer size used when streaming data in `debugdownload` below.
_chunksize = 4 << 10
1270 1270
1271 1271
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    # Resolve the URL through Mercurial's own machinery (auth, proxy, ...).
    fh = urlmod.open(ui, url, output)

    # Stream either to the requested output file or straight to the ui.
    if output:
        dest = open(output, b"wb", _chunksize)
    else:
        dest = ui
    try:
        while True:
            data = fh.read(_chunksize)
            if not data:
                break
            dest.write(data)
    finally:
        # Only close a destination we opened ourselves; the ui is not ours.
        if output:
            dest.close()
1294 1294
1295 1295
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # Locate the extension on disk; oxidized (PyOxidizer) builds have no
        # per-module __file__, so fall back to the executable path there.
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # In quiet/verbose mode the name stands alone; otherwise annotate it
        # with the compatibility status derived from `testedwith`.
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        # always record the machine-readable flag, even when not printed
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1357 1357
1358 1358
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # The fileset expression goes through these transformation stages in
    # order; each can be dumped with --show-stage.
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # omit the "* parsed:" header in legacy --verbose mode
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Collect the candidate file names the matcher will be tested against.
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1454 1454
1455 1455
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that hasn't been written yet
    (as of 5.9rc0).

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report only builds the report; combining it with --from-report or
    # --dry-run would be contradictory.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # issue6528 only affects revlogv1 storage; refuse other formats.
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the actual detection/repair logic lives in the `rewrite` module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1528 1528
1529 1529
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: widest variant name, but at least the header width
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad each variant name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes values (duck-typed via startswith) are printed verbatim;
            # everything else is rendered as a yes/no boolean
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured formatters (json, ...) keep the raw value
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # choose labels so mismatches can be highlighted by the color
        # extension
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1600 1600
1601 1601
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(value):
        # render a truth value exactly as the textual output expects
        return b'yes' if value else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # Probe case sensitivity with a throwaway temp file; best effort only —
    # if the directory is not writable we report '(unknown)'.
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1624 1624
1625 1625
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name to the on-disk bundle header
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1672 1672
1673 1673
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # the file itself may match, or any of its parent directories
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1722 1722
1723 1723
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # --debug prints full hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # measure the id width from the first revision (fall back to 12 for an
    # empty store)
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1763 1763
1764 1764
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        # one edge per parent; the second parent only exists for merges
        ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
        # NOTE(review): with optionalrepo=True, `repo` may be None when a
        # storage file is passed outside a repository, in which case
        # `repo.nullid` would raise AttributeError — confirm whether that
        # path is reachable.
        if pp[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write(b"}\n")
1783 1783
1784 1784
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Prime the index/nodemap first so the statistics below are meaningful.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    has_stats = util.safehasattr(index, b'stats')
    if not has_stats:
        # only the native (C/Rust) index implementations expose stats()
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1794 1794
1795 1795
1796 1796 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1797 1797 def debuginstall(ui, **opts):
1798 1798 """test Mercurial installation
1799 1799
1800 1800 Returns 0 on success.
1801 1801 """
1802 1802 opts = pycompat.byteskwargs(opts)
1803 1803
1804 1804 problems = 0
1805 1805
1806 1806 fm = ui.formatter(b'debuginstall', opts)
1807 1807 fm.startitem()
1808 1808
1809 1809 # encoding might be unknown or wrong. don't translate these messages.
1810 1810 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1811 1811 err = None
1812 1812 try:
1813 1813 codecs.lookup(pycompat.sysstr(encoding.encoding))
1814 1814 except LookupError as inst:
1815 1815 err = stringutil.forcebytestr(inst)
1816 1816 problems += 1
1817 1817 fm.condwrite(
1818 1818 err,
1819 1819 b'encodingerror',
1820 1820 b" %s\n (check that your locale is properly set)\n",
1821 1821 err,
1822 1822 )
1823 1823
1824 1824 # Python
1825 1825 pythonlib = None
1826 1826 if util.safehasattr(os, '__file__'):
1827 1827 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1828 1828 elif getattr(sys, 'oxidized', False):
1829 1829 pythonlib = pycompat.sysexecutable
1830 1830
1831 1831 fm.write(
1832 1832 b'pythonexe',
1833 1833 _(b"checking Python executable (%s)\n"),
1834 1834 pycompat.sysexecutable or _(b"unknown"),
1835 1835 )
1836 1836 fm.write(
1837 1837 b'pythonimplementation',
1838 1838 _(b"checking Python implementation (%s)\n"),
1839 1839 pycompat.sysbytes(platform.python_implementation()),
1840 1840 )
1841 1841 fm.write(
1842 1842 b'pythonver',
1843 1843 _(b"checking Python version (%s)\n"),
1844 1844 (b"%d.%d.%d" % sys.version_info[:3]),
1845 1845 )
1846 1846 fm.write(
1847 1847 b'pythonlib',
1848 1848 _(b"checking Python lib (%s)...\n"),
1849 1849 pythonlib or _(b"unknown"),
1850 1850 )
1851 1851
1852 1852 try:
1853 1853 from . import rustext # pytype: disable=import-error
1854 1854
1855 1855 rustext.__doc__ # trigger lazy import
1856 1856 except ImportError:
1857 1857 rustext = None
1858 1858
1859 1859 security = set(sslutil.supportedprotocols)
1860 1860 if sslutil.hassni:
1861 1861 security.add(b'sni')
1862 1862
1863 1863 fm.write(
1864 1864 b'pythonsecurity',
1865 1865 _(b"checking Python security support (%s)\n"),
1866 1866 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1867 1867 )
1868 1868
1869 1869 # These are warnings, not errors. So don't increment problem count. This
1870 1870 # may change in the future.
1871 1871 if b'tls1.2' not in security:
1872 1872 fm.plain(
1873 1873 _(
1874 1874 b' TLS 1.2 not supported by Python install; '
1875 1875 b'network connections lack modern security\n'
1876 1876 )
1877 1877 )
1878 1878 if b'sni' not in security:
1879 1879 fm.plain(
1880 1880 _(
1881 1881 b' SNI not supported by Python install; may have '
1882 1882 b'connectivity issues with some servers\n'
1883 1883 )
1884 1884 )
1885 1885
1886 1886 fm.plain(
1887 1887 _(
1888 1888 b"checking Rust extensions (%s)\n"
1889 1889 % (b'missing' if rustext is None else b'installed')
1890 1890 ),
1891 1891 )
1892 1892
1893 1893 # TODO print CA cert info
1894 1894
1895 1895 # hg version
1896 1896 hgver = util.version()
1897 1897 fm.write(
1898 1898 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1899 1899 )
1900 1900 fm.write(
1901 1901 b'hgverextra',
1902 1902 _(b"checking Mercurial custom build (%s)\n"),
1903 1903 b'+'.join(hgver.split(b'+')[1:]),
1904 1904 )
1905 1905
1906 1906 # compiled modules
1907 1907 hgmodules = None
1908 1908 if util.safehasattr(sys.modules[__name__], '__file__'):
1909 1909 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1910 1910 elif getattr(sys, 'oxidized', False):
1911 1911 hgmodules = pycompat.sysexecutable
1912 1912
1913 1913 fm.write(
1914 1914 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1915 1915 )
1916 1916 fm.write(
1917 1917 b'hgmodules',
1918 1918 _(b"checking installed modules (%s)...\n"),
1919 1919 hgmodules or _(b"unknown"),
1920 1920 )
1921 1921
1922 1922 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1923 1923 rustext = rustandc # for now, that's the only case
1924 1924 cext = policy.policy in (b'c', b'allow') or rustandc
1925 1925 nopure = cext or rustext
1926 1926 if nopure:
1927 1927 err = None
1928 1928 try:
1929 1929 if cext:
1930 1930 from .cext import ( # pytype: disable=import-error
1931 1931 base85,
1932 1932 bdiff,
1933 1933 mpatch,
1934 1934 osutil,
1935 1935 )
1936 1936
1937 1937 # quiet pyflakes
1938 1938 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1939 1939 if rustext:
1940 1940 from .rustext import ( # pytype: disable=import-error
1941 1941 ancestor,
1942 1942 dirstate,
1943 1943 )
1944 1944
1945 1945 dir(ancestor), dir(dirstate) # quiet pyflakes
1946 1946 except Exception as inst:
1947 1947 err = stringutil.forcebytestr(inst)
1948 1948 problems += 1
1949 1949 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1950 1950
1951 1951 compengines = util.compengines._engines.values()
1952 1952 fm.write(
1953 1953 b'compengines',
1954 1954 _(b'checking registered compression engines (%s)\n'),
1955 1955 fm.formatlist(
1956 1956 sorted(e.name() for e in compengines),
1957 1957 name=b'compengine',
1958 1958 fmt=b'%s',
1959 1959 sep=b', ',
1960 1960 ),
1961 1961 )
1962 1962 fm.write(
1963 1963 b'compenginesavail',
1964 1964 _(b'checking available compression engines (%s)\n'),
1965 1965 fm.formatlist(
1966 1966 sorted(e.name() for e in compengines if e.available()),
1967 1967 name=b'compengine',
1968 1968 fmt=b'%s',
1969 1969 sep=b', ',
1970 1970 ),
1971 1971 )
1972 1972 wirecompengines = compression.compengines.supportedwireengines(
1973 1973 compression.SERVERROLE
1974 1974 )
1975 1975 fm.write(
1976 1976 b'compenginesserver',
1977 1977 _(
1978 1978 b'checking available compression engines '
1979 1979 b'for wire protocol (%s)\n'
1980 1980 ),
1981 1981 fm.formatlist(
1982 1982 [e.name() for e in wirecompengines if e.wireprotosupport()],
1983 1983 name=b'compengine',
1984 1984 fmt=b'%s',
1985 1985 sep=b', ',
1986 1986 ),
1987 1987 )
1988 1988 re2 = b'missing'
1989 1989 if util._re2:
1990 1990 re2 = b'available'
1991 1991 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1992 1992 fm.data(re2=bool(util._re2))
1993 1993
1994 1994 # templates
1995 1995 p = templater.templatedir()
1996 1996 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1997 1997 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1998 1998 if p:
1999 1999 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2000 2000 if m:
2001 2001 # template found, check if it is working
2002 2002 err = None
2003 2003 try:
2004 2004 templater.templater.frommapfile(m)
2005 2005 except Exception as inst:
2006 2006 err = stringutil.forcebytestr(inst)
2007 2007 p = None
2008 2008 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2009 2009 else:
2010 2010 p = None
2011 2011 fm.condwrite(
2012 2012 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2013 2013 )
2014 2014 fm.condwrite(
2015 2015 not m,
2016 2016 b'defaulttemplatenotfound',
2017 2017 _(b" template '%s' not found\n"),
2018 2018 b"default",
2019 2019 )
2020 2020 if not p:
2021 2021 problems += 1
2022 2022 fm.condwrite(
2023 2023 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2024 2024 )
2025 2025
2026 2026 # editor
2027 2027 editor = ui.geteditor()
2028 2028 editor = util.expandpath(editor)
2029 2029 editorbin = procutil.shellsplit(editor)[0]
2030 2030 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2031 2031 cmdpath = procutil.findexe(editorbin)
2032 2032 fm.condwrite(
2033 2033 not cmdpath and editor == b'vi',
2034 2034 b'vinotfound',
2035 2035 _(
2036 2036 b" No commit editor set and can't find %s in PATH\n"
2037 2037 b" (specify a commit editor in your configuration"
2038 2038 b" file)\n"
2039 2039 ),
2040 2040 not cmdpath and editor == b'vi' and editorbin,
2041 2041 )
2042 2042 fm.condwrite(
2043 2043 not cmdpath and editor != b'vi',
2044 2044 b'editornotfound',
2045 2045 _(
2046 2046 b" Can't find editor '%s' in PATH\n"
2047 2047 b" (specify a commit editor in your configuration"
2048 2048 b" file)\n"
2049 2049 ),
2050 2050 not cmdpath and editorbin,
2051 2051 )
2052 2052 if not cmdpath and editor != b'vi':
2053 2053 problems += 1
2054 2054
2055 2055 # check username
2056 2056 username = None
2057 2057 err = None
2058 2058 try:
2059 2059 username = ui.username()
2060 2060 except error.Abort as e:
2061 2061 err = e.message
2062 2062 problems += 1
2063 2063
2064 2064 fm.condwrite(
2065 2065 username, b'username', _(b"checking username (%s)\n"), username
2066 2066 )
2067 2067 fm.condwrite(
2068 2068 err,
2069 2069 b'usernameerror',
2070 2070 _(
2071 2071 b"checking username...\n %s\n"
2072 2072 b" (specify a username in your configuration file)\n"
2073 2073 ),
2074 2074 err,
2075 2075 )
2076 2076
2077 2077 for name, mod in extensions.extensions():
2078 2078 handler = getattr(mod, 'debuginstall', None)
2079 2079 if handler is not None:
2080 2080 problems += handler(ui, fm)
2081 2081
2082 2082 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2083 2083 if not problems:
2084 2084 fm.data(problems=problems)
2085 2085 fm.condwrite(
2086 2086 problems,
2087 2087 b'problems',
2088 2088 _(b"%d problems detected, please check your install!\n"),
2089 2089 problems,
2090 2090 )
2091 2091 fm.end()
2092 2092
2093 2093 return problems
2094 2094
2095 2095
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Decode every hex id and ask the peer about all of them in one call.
    nodes = [bin(hexid) for hexid in ids]
    bits = [b"1" if known else b"0" for known in peer.known(nodes)]
    ui.write(b"%s\n" % b"".join(bits))
2109 2109
2110 2110
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only so existing shell-completion scripts that still invoke
    # 'hg debuglabelcomplete' keep working; delegates to debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2115 2115
2116 2116
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-freeing just deletes the lock file; no check is made that the
    # holder is actually gone -- hence "DANGEROUS" in the option help.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # wait=False: fail immediately rather than block if held
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the lock(s) until the user answers the prompt (or the
            # process is interrupted); the finally clause releases them.
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could acquire the lock, so nobody holds it: it is free.
            l.release()
        else:
            try:
                # Lock is held: report holder, age, and (if remote) host.
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        # Reached when the lock was acquired above or its file is absent.
        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2228 2228
2229 2229
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache of the root manifest storage; not every
        # revlog implementation has one, hence the AttributeError fallback.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Clearing mutates persisted state, so take the wlock first.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # Default mode: display the cache contents, most recent first.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2303 2303
2304 2304
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # Report which on-disk merge-state format (v1/v2) is present and
        # whether the two agree; this mirrors the selection logic used
        # when the merge state is actually read.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable rendering of the structured data fed to
        # the formatter below.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two commits being merged (local/other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the fields emitted depend on the record type.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that have no merge record of their own.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2412 2412
2413 2413
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # The 'branches' namespace is skipped here on purpose: historically only
    # *open* branches were listed, so branch names are gathered separately
    # from the branchmap below.
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # An empty argument list means "complete everything".
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2436 2436
2437 2437
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Serialize a fresh nodemap from the changelog index, preferring
        # the index's own serializer when it provides one.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Dump the raw persisted nodemap bytes, if any exist on disk.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the persisted data against the live changelog index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Print the docket (metadata header) of the persisted nodemap.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # NOTE(review): assumes data_length > 0 -- a zero-length docket
            # would raise ZeroDivisionError here; confirm that cannot occur
            # for a persisted nodemap.
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2499 2499
2500 2500
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Validate and decode a full-length hex node id without requiring it
        # to exist locally (markers may reference unknown changesets).
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # Deletion mode: remove markers by their numeric index.
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker saying 'precursor' was superseded
        # by 'successors'.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        # BUGFIX: the message used to read "cannot used".
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Listing mode: display markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2650 2650
2651 2651
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # Emit one 'source -> destination' line per copy recorded against p1.
    copymap = ctx.p1copies()
    for dst, src in copymap.items():
        ui.write(b'%s -> %s\n' % (src, dst))
2664 2664
2665 2665
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # BUGFIX: this function was previously (mis)named debugp1copies, which
    # silently shadowed the real debugp1copies defined just above at module
    # level.  The command registration (b'debugp2copies') was unaffected,
    # but the module attribute 'debugp1copies' pointed at the wrong
    # function.  Renamed to match the command it implements.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # Emit one 'source -> destination' line per copy recorded against p2.
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2678 2678
2679 2679
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) from the dirstate whose path starts with
        # 'path' and whose dirstate state character is in 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Spec points outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative and slash-separated so it compares
        # against dirstate keys (which always use b'/').
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator: a match
                # beyond it is reported as a directory prefix instead.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Translate the filter options into dirstate state characters
    # (n/m normal-ish, a added, r removed).
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # No filter option given means "accept every state".
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2748 2748
2749 2749
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    # Emit 'source -> destination' lines, sorted by destination path.
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2763 2763
2764 2764
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Force peer request logging on; the output still only shows up
    # under --debug.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        local_label = _(b'yes') if peer.local() is not None else _(b'no')
        push_label = _(b'yes') if peer.canpush() else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % local_label)
        ui.write(_(b'pushable: %s\n') % push_label)
    finally:
        peer.close()
2788 2788
2789 2789
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # Apply --tool the same way a real merge would: as a forcemerge
        # config override.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Unless --debug is set, silence the warnings _picktool emits
            # while matching merge-patterns and probing tools.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2874 2874
2875 2875
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            pairs = pycompat.iteritems(peer.listkeys(namespace))
            for key, value in sorted(pairs):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
        else:
            # Update mode: conditionally move KEY from OLD to NEW.
            key, old, new = keyinfo
            with peer.commandexecutor() as executor:
                result = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            # Exit status 0 on success (truthy push result), 1 otherwise.
            return not result
    finally:
        peer.close()
2911 2911
2912 2912
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Resolve both revisions and build their pvecs.
    ctx_a = scmutil.revsingle(repo, a)
    ctx_b = scmutil.revsingle(repo, b)
    vec_a = pvec.ctxpvec(ctx_a)
    vec_b = pvec.ctxpvec(ctx_b)
    # Classify the relation between the two vectors: equal, after,
    # before, or (via `|`) neither — presumably unrelated lines of
    # history; TODO confirm against pvec's comparison semantics.
    if vec_a == vec_b:
        rel = b"="
    elif vec_a > vec_b:
        rel = b">"
    elif vec_a < vec_b:
        rel = b"<"
    elif vec_a | vec_b:
        rel = b"|"
    ui.write(_(b"a: %s\n") % vec_a)
    ui.write(_(b"b: %s\n") % vec_b)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (vec_a._depth, vec_b._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(vec_a._depth - vec_b._depth),
            pvec._hamming(vec_a._vec, vec_b._vec),
            vec_a.distance(vec_b),
            rel,
        )
    )
2939 2939
2940 2940
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None  # None tells rebuild() to reset every file
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # tracked in the manifest but missing from the dirstate
            manifestonly = manifestfiles - dirstatefiles
            # present in the dirstate but not in the manifest
            dsonly = dirstatefiles - manifestfiles
            # 'a' (added) entries are legitimately absent from the parent
            # manifest, so leave those untouched
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2988 2988
2989 2989
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # This span contained both the pre- and post-change versions of the
    # definition (diff rendering); this is the reconstructed post-change
    # version, which threads the --only-data flag through to repair so a
    # much cheaper .d-files-only scan can be requested.
    opts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, opts.get(b"only_data"))
2994 3006
2995 3007
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    # Fix: the loop variable was named `abs`, shadowing the builtin.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    m = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(m):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a falsy value.
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abspath)
        if o:
            ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_(b"%s not renamed\n") % rel)
3015 3027
3016 3028
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # Emit one requirement per line, sorted for deterministic output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3022 3034
3023 3035
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: one raw row per revision, then exit.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # a full revision is its own delta base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # maintain the running set of head revisions seen so far
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each triple is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into the [min, max, total] accumulator `l`
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full snapshot (no delta parent)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # delta revision: extend the chain bookkeeping of its base
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the chunk identifies its compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # turn each accumulator's total slot into an average
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # format-string builders: width-adjusted integer and percentage forms
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # render the compression-type byte in a human-friendly way
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3379 3391
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full hashes with --debug, short ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the header matching the selected format/verbosity combination.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            # format 0 reports parents as node ids
            try:
                pp = r.parents(node)
            except Exception:
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers and adds flags
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3493 3505
3494 3506
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The parse pipeline: each stage transforms the tree of the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Decide which stages to print: unconditionally, or only when changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and optimized trees and diff the results.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Results differ: print a unified-style diff of the revision lists.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3626 3638
3627 3639
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive log destinations.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Blocks serving requests until the client disconnects.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3676 3688
3677 3689
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only used if you are one of the few people that
    deeply understand both conversion tools and file level histories. If you
    are reading this help, you are not one of these people (most of them sailed
    west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # rev2 defaults to the null revision when omitted
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3705 3717
3706 3718
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the positional FILE slot actually holds the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fix: the usage errors previously named 'debugdata' (copy-paste
            # from that command); report this command's own name instead.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3733 3745
3734 3746
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only schemes with a well-known default port are supported.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # NOTE(review): ssl.wrap_socket is deprecated (removed in Python 3.12);
    # verification is deliberately disabled here because we only need the
    # peer certificate bytes, not a trusted connection.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # DER-encoded peer certificate
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first a dry-run check, then (if incomplete) ask Windows to fetch
        # the missing links in the chain
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3806 3818
3807 3819
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every *.hg bundle under .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # Reuse the log/incoming option plumbing; force an empty --bundle and no
    # --force so getremotechanges behaves like plain `hg incoming`.
    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Render the changesets of one bundle, honoring --newest-first,
        # --limit and --no-merges from logopts.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            # A changeset with two non-null parents is a merge.
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent we no longer have; report and move on.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming-changes chatter while probing the bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    # Only unbundle if this backup actually contains the
                    # requested changeset; stop after the first match.
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            # Legacy (bundle1) application path.
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: header with the bundle's mtime, then either
                # the bundle path (--verbose) or a one-line-per-cset log.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always drop the temporary bundle repo state from
            # getremotechanges.
            cleanupfn()
3948 3960
3949 3961
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump each subrepository entry (path, source, pinned revision) of the
    # requested revision; defaults to the working-directory parent context.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3961 3973
3962 3974
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interpreter namespace with the ui and the (possibly absent)
    # repo so they are directly usable at the prompt.
    code.interact(local={'ui': ui, 'repo': repo})
3978 3990
3979 3991
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Formatting helpers: a context renders as its full identifier, a raw
    # node as its short hex form.
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # Each successors set becomes one output line listing its nodes.
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
4034 4046
4035 4047
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: only report what the cache already holds.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode is None:
            # No cached entry for this revision.
            display = b'missing'
        elif fnode:
            display = hex(fnode)
            # Cached node that the .hgtags filelog does not know about.
            if not flog.hasnode(fnode):
                display += b' (unknown node)'
        else:
            # Falsy non-None value: the cache slot exists but is unusable.
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4054 4066
4055 4067
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # -r only makes sense with a repository to resolve revisions against.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions; an empty key or the reserved name
    # 'ui' is rejected the same way as an unsplittable definition.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Dump the parsed tree, and the alias-expanded tree when template
        # aliases actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4119 4131
4120 4132
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    # ui.getpass() can return None; substitute a visible marker so the
    # response line is always printable.
    response = ui.getpass(prompt)
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4135 4147
4136 4148
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever ui.prompt() returned, verbatim.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4149 4161
4150 4162
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks so cache files can be
    # rewritten safely. CACHES_ALL presumably selects every cache the repo
    # knows about (defined alongside the `repository` interface) — broader
    # than the default post-transaction set; confirm against its definition.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4156 4168
4157 4169
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Thin wrapper: all of the evaluation/upgrade logic lives in the upgrade
    # module. --optimize values are deduplicated into a set before delegating.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4207 4219
4208 4220
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        # Nothing matched: print nothing at all.
        return
    # Normalize path separators for display when ui.slash is set on a
    # platform whose native separator is not '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypath = util.normpath
    else:
        displaypath = lambda fn: fn
    # Column widths sized to the longest repo-absolute and cwd-relative paths.
    # (Renamed the loop variable from `abs`, which shadowed the builtin, and
    # dropped the intermediate list materializations inside max().)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            displaypath(repo.pathto(path)),
            m.exact(path) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4235 4247
4236 4248
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # When divergent nodes are involved, list them (with their phase)
        # before the reason; otherwise the prefix stays empty.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
                )
                + b' '
            )
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4254 4266
4255 4267
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the truthy command-specific
        # options are forwarded to the wire call.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        first = repo.debugwireargs(*vals, **args)
        second = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        repo.close()
4286 4298
4287 4299
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini-language into (action, lines) pairs.

    *fh* yields bytes lines. A line with no leading space starts a new
    block whose text is the action; subsequent indented lines belong to
    that block. Blank lines and ``#`` comment lines are skipped. A line
    indented deeper than its predecessor is treated as a continuation and
    concatenated onto the previous block line.
    """
    action = None
    lines = []
    previndent = 0

    for raw in fh:
        stripped = raw.rstrip()
        # Skip blanks and comments entirely.
        if not stripped or stripped.startswith(b'#'):
            continue

        if not stripped.startswith(b' '):
            # Unindented line: emit the block in progress, start a new one.
            if action:
                yield action, lines
            action, lines, previndent = stripped, [], 0
            continue

        # Indented lines are only legal inside a block.
        if not action:
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(stripped) - len(stripped.lstrip())

        if indent > previndent and lines:
            # Deeper indent continues the previous logical line.
            lines[-1] += stripped.lstrip()
        else:
            lines.append(stripped)
        previndent = indent

    # Emit the trailing block, if any.
    if action:
        yield action, lines
4328 4340
4329 4341
4330 4342 @command(
4331 4343 b'debugwireproto',
4332 4344 [
4333 4345 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4334 4346 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4335 4347 (
4336 4348 b'',
4337 4349 b'noreadstderr',
4338 4350 False,
4339 4351 _(b'do not read from stderr of the remote'),
4340 4352 ),
4341 4353 (
4342 4354 b'',
4343 4355 b'nologhandshake',
4344 4356 False,
4345 4357 _(b'do not log I/O related to the peer handshake'),
4346 4358 ),
4347 4359 ]
4348 4360 + cmdutil.remoteopts,
4349 4361 _(b'[PATH]'),
4350 4362 optionalrepo=True,
4351 4363 )
4352 4364 def debugwireproto(ui, repo, path=None, **opts):
4353 4365 """send wire protocol commands to a server
4354 4366
4355 4367 This command can be used to issue wire protocol commands to remote
4356 4368 peers and to debug the raw data being exchanged.
4357 4369
4358 4370 ``--localssh`` will start an SSH server against the current repository
4359 4371 and connect to that. By default, the connection will perform a handshake
4360 4372 and establish an appropriate peer instance.
4361 4373
4362 4374 ``--peer`` can be used to bypass the handshake protocol and construct a
4363 4375 peer instance using the specified class type. Valid values are ``raw``,
4364 4376 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4365 4377 raw data payloads and don't support higher-level command actions.
4366 4378
4367 4379 ``--noreadstderr`` can be used to disable automatic reading from stderr
4368 4380 of the peer (for SSH connections only). Disabling automatic reading of
4369 4381 stderr is useful for making output more deterministic.
4370 4382
4371 4383 Commands are issued via a mini language which is specified via stdin.
4372 4384 The language consists of individual actions to perform. An action is
4373 4385 defined by a block. A block is defined as a line with no leading
4374 4386 space followed by 0 or more lines with leading space. Blocks are
4375 4387 effectively a high-level command with additional metadata.
4376 4388
4377 4389 Lines beginning with ``#`` are ignored.
4378 4390
4379 4391 The following sections denote available actions.
4380 4392
4381 4393 raw
4382 4394 ---
4383 4395
4384 4396 Send raw data to the server.
4385 4397
4386 4398 The block payload contains the raw data to send as one atomic send
4387 4399 operation. The data may not actually be delivered in a single system
4388 4400 call: it depends on the abilities of the transport being used.
4389 4401
4390 4402 Each line in the block is de-indented and concatenated. Then, that
4391 4403 value is evaluated as a Python b'' literal. This allows the use of
4392 4404 backslash escaping, etc.
4393 4405
4394 4406 raw+
4395 4407 ----
4396 4408
4397 4409 Behaves like ``raw`` except flushes output afterwards.
4398 4410
4399 4411 command <X>
4400 4412 -----------
4401 4413
4402 4414 Send a request to run a named command, whose name follows the ``command``
4403 4415 string.
4404 4416
4405 4417 Arguments to the command are defined as lines in this block. The format of
4406 4418 each line is ``<key> <value>``. e.g.::
4407 4419
4408 4420 command listkeys
4409 4421 namespace bookmarks
4410 4422
4411 4423 If the value begins with ``eval:``, it will be interpreted as a Python
4412 4424 literal expression. Otherwise values are interpreted as Python b'' literals.
4413 4425 This allows sending complex types and encoding special byte sequences via
4414 4426 backslash escaping.
4415 4427
4416 4428 The following arguments have special meaning:
4417 4429
4418 4430 ``PUSHFILE``
4419 4431 When defined, the *push* mechanism of the peer will be used instead
4420 4432 of the static request-response mechanism and the content of the
4421 4433 file specified in the value of this argument will be sent as the
4422 4434 command payload.
4423 4435
4424 4436 This can be used to submit a local bundle file to the remote.
4425 4437
4426 4438 batchbegin
4427 4439 ----------
4428 4440
4429 4441 Instruct the peer to begin a batched send.
4430 4442
4431 4443 All ``command`` blocks are queued for execution until the next
4432 4444 ``batchsubmit`` block.
4433 4445
4434 4446 batchsubmit
4435 4447 -----------
4436 4448
4437 4449 Submit previously queued ``command`` blocks as a batch request.
4438 4450
4439 4451 This action MUST be paired with a ``batchbegin`` action.
4440 4452
4441 4453 httprequest <method> <path>
4442 4454 ---------------------------
4443 4455
4444 4456 (HTTP peer only)
4445 4457
4446 4458 Send an HTTP request to the peer.
4447 4459
4448 4460 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4449 4461
4450 4462 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4451 4463 headers to add to the request. e.g. ``Accept: foo``.
4452 4464
4453 4465 The following arguments are special:
4454 4466
4455 4467 ``BODYFILE``
4456 4468 The content of the file defined as the value to this argument will be
4457 4469 transferred verbatim as the HTTP request body.
4458 4470
4459 4471 ``frame <type> <flags> <payload>``
4460 4472 Send a unified protocol frame as part of the request body.
4461 4473
4462 4474 All frames will be collected and sent as the body to the HTTP
4463 4475 request.
4464 4476
4465 4477 close
4466 4478 -----
4467 4479
4468 4480 Close the connection to the server.
4469 4481
4470 4482 flush
4471 4483 -----
4472 4484
4473 4485 Flush data written to the server.
4474 4486
4475 4487 readavailable
4476 4488 -------------
4477 4489
4478 4490 Close the write end of the connection and read all available data from
4479 4491 the server.
4480 4492
4481 4493 If the connection to the server encompasses multiple pipes, we poll both
4482 4494 pipes and read available data.
4483 4495
4484 4496 readline
4485 4497 --------
4486 4498
4487 4499 Read a line of output from the server. If there are multiple output
4488 4500 pipes, reads only the main pipe.
4489 4501
4490 4502 ereadline
4491 4503 ---------
4492 4504
4493 4505 Like ``readline``, but read from the stderr pipe, if available.
4494 4506
4495 4507 read <X>
4496 4508 --------
4497 4509
4498 4510 ``read()`` N bytes from the server's main output pipe.
4499 4511
4500 4512 eread <X>
4501 4513 ---------
4502 4514
4503 4515 ``read()`` N bytes from the server's stderr pipe, if available.
4504 4516
4505 4517 Specifying Unified Frame-Based Protocol Frames
4506 4518 ----------------------------------------------
4507 4519
4508 4520 It is possible to emit a *Unified Frame-Based Protocol* by using special
4509 4521 syntax.
4510 4522
4511 4523 A frame is composed as a type, flags, and payload. These can be parsed
4512 4524 from a string of the form:
4513 4525
4514 4526 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4515 4527
4516 4528 ``request-id`` and ``stream-id`` are integers defining the request and
4517 4529 stream identifiers.
4518 4530
4519 4531 ``type`` can be an integer value for the frame type or the string name
4520 4532 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4521 4533 ``command-name``.
4522 4534
4523 4535 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4524 4536 components. Each component (and there can be just one) can be an integer
4525 4537 or a flag name for stream flags or frame flags, respectively. Values are
4526 4538 resolved to integers and then bitwise OR'd together.
4527 4539
4528 4540 ``payload`` represents the raw frame payload. If it begins with
4529 4541 ``cbor:``, the following string is evaluated as Python code and the
4530 4542 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4531 4543 as a Python byte string literal.
4532 4544 """
4533 4545 opts = pycompat.byteskwargs(opts)
4534 4546
4535 4547 if opts[b'localssh'] and not repo:
4536 4548 raise error.Abort(_(b'--localssh requires a repository'))
4537 4549
4538 4550 if opts[b'peer'] and opts[b'peer'] not in (
4539 4551 b'raw',
4540 4552 b'http2',
4541 4553 b'ssh1',
4542 4554 b'ssh2',
4543 4555 ):
4544 4556 raise error.Abort(
4545 4557 _(b'invalid value for --peer'),
4546 4558 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4547 4559 )
4548 4560
4549 4561 if path and opts[b'localssh']:
4550 4562 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4551 4563
4552 4564 if ui.interactive():
4553 4565 ui.write(_(b'(waiting for commands on stdin)\n'))
4554 4566
4555 4567 blocks = list(_parsewirelangblocks(ui.fin))
4556 4568
4557 4569 proc = None
4558 4570 stdin = None
4559 4571 stdout = None
4560 4572 stderr = None
4561 4573 opener = None
4562 4574
4563 4575 if opts[b'localssh']:
4564 4576 # We start the SSH server in its own process so there is process
4565 4577 # separation. This prevents a whole class of potential bugs around
4566 4578 # shared state from interfering with server operation.
4567 4579 args = procutil.hgcmd() + [
4568 4580 b'-R',
4569 4581 repo.root,
4570 4582 b'debugserve',
4571 4583 b'--sshstdio',
4572 4584 ]
4573 4585 proc = subprocess.Popen(
4574 4586 pycompat.rapply(procutil.tonativestr, args),
4575 4587 stdin=subprocess.PIPE,
4576 4588 stdout=subprocess.PIPE,
4577 4589 stderr=subprocess.PIPE,
4578 4590 bufsize=0,
4579 4591 )
4580 4592
4581 4593 stdin = proc.stdin
4582 4594 stdout = proc.stdout
4583 4595 stderr = proc.stderr
4584 4596
4585 4597 # We turn the pipes into observers so we can log I/O.
4586 4598 if ui.verbose or opts[b'peer'] == b'raw':
4587 4599 stdin = util.makeloggingfileobject(
4588 4600 ui, proc.stdin, b'i', logdata=True
4589 4601 )
4590 4602 stdout = util.makeloggingfileobject(
4591 4603 ui, proc.stdout, b'o', logdata=True
4592 4604 )
4593 4605 stderr = util.makeloggingfileobject(
4594 4606 ui, proc.stderr, b'e', logdata=True
4595 4607 )
4596 4608
4597 4609 # --localssh also implies the peer connection settings.
4598 4610
4599 4611 url = b'ssh://localserver'
4600 4612 autoreadstderr = not opts[b'noreadstderr']
4601 4613
4602 4614 if opts[b'peer'] == b'ssh1':
4603 4615 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4604 4616 peer = sshpeer.sshv1peer(
4605 4617 ui,
4606 4618 url,
4607 4619 proc,
4608 4620 stdin,
4609 4621 stdout,
4610 4622 stderr,
4611 4623 None,
4612 4624 autoreadstderr=autoreadstderr,
4613 4625 )
4614 4626 elif opts[b'peer'] == b'ssh2':
4615 4627 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4616 4628 peer = sshpeer.sshv2peer(
4617 4629 ui,
4618 4630 url,
4619 4631 proc,
4620 4632 stdin,
4621 4633 stdout,
4622 4634 stderr,
4623 4635 None,
4624 4636 autoreadstderr=autoreadstderr,
4625 4637 )
4626 4638 elif opts[b'peer'] == b'raw':
4627 4639 ui.write(_(b'using raw connection to peer\n'))
4628 4640 peer = None
4629 4641 else:
4630 4642 ui.write(_(b'creating ssh peer from handshake results\n'))
4631 4643 peer = sshpeer.makepeer(
4632 4644 ui,
4633 4645 url,
4634 4646 proc,
4635 4647 stdin,
4636 4648 stdout,
4637 4649 stderr,
4638 4650 autoreadstderr=autoreadstderr,
4639 4651 )
4640 4652
4641 4653 elif path:
4642 4654 # We bypass hg.peer() so we can proxy the sockets.
4643 4655 # TODO consider not doing this because we skip
4644 4656 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4645 4657 u = urlutil.url(path)
4646 4658 if u.scheme != b'http':
4647 4659 raise error.Abort(_(b'only http:// paths are currently supported'))
4648 4660
4649 4661 url, authinfo = u.authinfo()
4650 4662 openerargs = {
4651 4663 'useragent': b'Mercurial debugwireproto',
4652 4664 }
4653 4665
4654 4666 # Turn pipes/sockets into observers so we can log I/O.
4655 4667 if ui.verbose:
4656 4668 openerargs.update(
4657 4669 {
4658 4670 'loggingfh': ui,
4659 4671 'loggingname': b's',
4660 4672 'loggingopts': {
4661 4673 'logdata': True,
4662 4674 'logdataapis': False,
4663 4675 },
4664 4676 }
4665 4677 )
4666 4678
4667 4679 if ui.debugflag:
4668 4680 openerargs['loggingopts']['logdataapis'] = True
4669 4681
4670 4682 # Don't send default headers when in raw mode. This allows us to
4671 4683 # bypass most of the behavior of our URL handling code so we can
4672 4684 # have near complete control over what's sent on the wire.
4673 4685 if opts[b'peer'] == b'raw':
4674 4686 openerargs['sendaccept'] = False
4675 4687
4676 4688 opener = urlmod.opener(ui, authinfo, **openerargs)
4677 4689
4678 4690 if opts[b'peer'] == b'http2':
4679 4691 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4680 4692 # We go through makepeer() because we need an API descriptor for
4681 4693 # the peer instance to be useful.
4682 4694 maybe_silent = (
4683 4695 ui.silent()
4684 4696 if opts[b'nologhandshake']
4685 4697 else util.nullcontextmanager()
4686 4698 )
4687 4699 with maybe_silent, ui.configoverride(
4688 4700 {(b'experimental', b'httppeer.advertise-v2'): True}
4689 4701 ):
4690 4702 peer = httppeer.makepeer(ui, path, opener=opener)
4691 4703
4692 4704 if not isinstance(peer, httppeer.httpv2peer):
4693 4705 raise error.Abort(
4694 4706 _(
4695 4707 b'could not instantiate HTTP peer for '
4696 4708 b'wire protocol version 2'
4697 4709 ),
4698 4710 hint=_(
4699 4711 b'the server may not have the feature '
4700 4712 b'enabled or is not allowing this '
4701 4713 b'client version'
4702 4714 ),
4703 4715 )
4704 4716
4705 4717 elif opts[b'peer'] == b'raw':
4706 4718 ui.write(_(b'using raw connection to peer\n'))
4707 4719 peer = None
4708 4720 elif opts[b'peer']:
4709 4721 raise error.Abort(
4710 4722 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4711 4723 )
4712 4724 else:
4713 4725 peer = httppeer.makepeer(ui, path, opener=opener)
4714 4726
4715 4727 # We /could/ populate stdin/stdout with sock.makefile()...
4716 4728 else:
4717 4729 raise error.Abort(_(b'unsupported connection configuration'))
4718 4730
4719 4731 batchedcommands = None
4720 4732
4721 4733 # Now perform actions based on the parsed wire language instructions.
4722 4734 for action, lines in blocks:
4723 4735 if action in (b'raw', b'raw+'):
4724 4736 if not stdin:
4725 4737 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4726 4738
4727 4739 # Concatenate the data together.
4728 4740 data = b''.join(l.lstrip() for l in lines)
4729 4741 data = stringutil.unescapestr(data)
4730 4742 stdin.write(data)
4731 4743
4732 4744 if action == b'raw+':
4733 4745 stdin.flush()
4734 4746 elif action == b'flush':
4735 4747 if not stdin:
4736 4748 raise error.Abort(_(b'cannot call flush on this peer'))
4737 4749 stdin.flush()
4738 4750 elif action.startswith(b'command'):
4739 4751 if not peer:
4740 4752 raise error.Abort(
4741 4753 _(
4742 4754 b'cannot send commands unless peer instance '
4743 4755 b'is available'
4744 4756 )
4745 4757 )
4746 4758
4747 4759 command = action.split(b' ', 1)[1]
4748 4760
4749 4761 args = {}
4750 4762 for line in lines:
4751 4763 # We need to allow empty values.
4752 4764 fields = line.lstrip().split(b' ', 1)
4753 4765 if len(fields) == 1:
4754 4766 key = fields[0]
4755 4767 value = b''
4756 4768 else:
4757 4769 key, value = fields
4758 4770
4759 4771 if value.startswith(b'eval:'):
4760 4772 value = stringutil.evalpythonliteral(value[5:])
4761 4773 else:
4762 4774 value = stringutil.unescapestr(value)
4763 4775
4764 4776 args[key] = value
4765 4777
4766 4778 if batchedcommands is not None:
4767 4779 batchedcommands.append((command, args))
4768 4780 continue
4769 4781
4770 4782 ui.status(_(b'sending %s command\n') % command)
4771 4783
4772 4784 if b'PUSHFILE' in args:
4773 4785 with open(args[b'PUSHFILE'], 'rb') as fh:
4774 4786 del args[b'PUSHFILE']
4775 4787 res, output = peer._callpush(
4776 4788 command, fh, **pycompat.strkwargs(args)
4777 4789 )
4778 4790 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4779 4791 ui.status(
4780 4792 _(b'remote output: %s\n') % stringutil.escapestr(output)
4781 4793 )
4782 4794 else:
4783 4795 with peer.commandexecutor() as e:
4784 4796 res = e.callcommand(command, args).result()
4785 4797
4786 4798 if isinstance(res, wireprotov2peer.commandresponse):
4787 4799 val = res.objects()
4788 4800 ui.status(
4789 4801 _(b'response: %s\n')
4790 4802 % stringutil.pprint(val, bprefix=True, indent=2)
4791 4803 )
4792 4804 else:
4793 4805 ui.status(
4794 4806 _(b'response: %s\n')
4795 4807 % stringutil.pprint(res, bprefix=True, indent=2)
4796 4808 )
4797 4809
4798 4810 elif action == b'batchbegin':
4799 4811 if batchedcommands is not None:
4800 4812 raise error.Abort(_(b'nested batchbegin not allowed'))
4801 4813
4802 4814 batchedcommands = []
4803 4815 elif action == b'batchsubmit':
4804 4816 # There is a batching API we could go through. But it would be
4805 4817 # difficult to normalize requests into function calls. It is easier
4806 4818 # to bypass this layer and normalize to commands + args.
4807 4819 ui.status(
4808 4820 _(b'sending batch with %d sub-commands\n')
4809 4821 % len(batchedcommands)
4810 4822 )
4811 4823 assert peer is not None
4812 4824 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4813 4825 ui.status(
4814 4826 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4815 4827 )
4816 4828
4817 4829 batchedcommands = None
4818 4830
4819 4831 elif action.startswith(b'httprequest '):
4820 4832 if not opener:
4821 4833 raise error.Abort(
4822 4834 _(b'cannot use httprequest without an HTTP peer')
4823 4835 )
4824 4836
4825 4837 request = action.split(b' ', 2)
4826 4838 if len(request) != 3:
4827 4839 raise error.Abort(
4828 4840 _(
4829 4841 b'invalid httprequest: expected format is '
4830 4842 b'"httprequest <method> <path>'
4831 4843 )
4832 4844 )
4833 4845
4834 4846 method, httppath = request[1:]
4835 4847 headers = {}
4836 4848 body = None
4837 4849 frames = []
4838 4850 for line in lines:
4839 4851 line = line.lstrip()
4840 4852 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4841 4853 if m:
4842 4854 # Headers need to use native strings.
4843 4855 key = pycompat.strurl(m.group(1))
4844 4856 value = pycompat.strurl(m.group(2))
4845 4857 headers[key] = value
4846 4858 continue
4847 4859
4848 4860 if line.startswith(b'BODYFILE '):
4849 4861 with open(line.split(b' ', 1), b'rb') as fh:
4850 4862 body = fh.read()
4851 4863 elif line.startswith(b'frame '):
4852 4864 frame = wireprotoframing.makeframefromhumanstring(
4853 4865 line[len(b'frame ') :]
4854 4866 )
4855 4867
4856 4868 frames.append(frame)
4857 4869 else:
4858 4870 raise error.Abort(
4859 4871 _(b'unknown argument to httprequest: %s') % line
4860 4872 )
4861 4873
4862 4874 url = path + httppath
4863 4875
4864 4876 if frames:
4865 4877 body = b''.join(bytes(f) for f in frames)
4866 4878
4867 4879 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4868 4880
4869 4881 # urllib.Request insists on using has_data() as a proxy for
4870 4882 # determining the request method. Override that to use our
4871 4883 # explicitly requested method.
4872 4884 req.get_method = lambda: pycompat.sysstr(method)
4873 4885
4874 4886 try:
4875 4887 res = opener.open(req)
4876 4888 body = res.read()
4877 4889 except util.urlerr.urlerror as e:
4878 4890 # read() method must be called, but only exists in Python 2
4879 4891 getattr(e, 'read', lambda: None)()
4880 4892 continue
4881 4893
4882 4894 ct = res.headers.get('Content-Type')
4883 4895 if ct == 'application/mercurial-cbor':
4884 4896 ui.write(
4885 4897 _(b'cbor> %s\n')
4886 4898 % stringutil.pprint(
4887 4899 cborutil.decodeall(body), bprefix=True, indent=2
4888 4900 )
4889 4901 )
4890 4902
4891 4903 elif action == b'close':
4892 4904 assert peer is not None
4893 4905 peer.close()
4894 4906 elif action == b'readavailable':
4895 4907 if not stdout or not stderr:
4896 4908 raise error.Abort(
4897 4909 _(b'readavailable not available on this peer')
4898 4910 )
4899 4911
4900 4912 stdin.close()
4901 4913 stdout.read()
4902 4914 stderr.read()
4903 4915
4904 4916 elif action == b'readline':
4905 4917 if not stdout:
4906 4918 raise error.Abort(_(b'readline not available on this peer'))
4907 4919 stdout.readline()
4908 4920 elif action == b'ereadline':
4909 4921 if not stderr:
4910 4922 raise error.Abort(_(b'ereadline not available on this peer'))
4911 4923 stderr.readline()
4912 4924 elif action.startswith(b'read '):
4913 4925 count = int(action.split(b' ', 1)[1])
4914 4926 if not stdout:
4915 4927 raise error.Abort(_(b'read not available on this peer'))
4916 4928 stdout.read(count)
4917 4929 elif action.startswith(b'eread '):
4918 4930 count = int(action.split(b' ', 1)[1])
4919 4931 if not stderr:
4920 4932 raise error.Abort(_(b'eread not available on this peer'))
4921 4933 stderr.read(count)
4922 4934 else:
4923 4935 raise error.Abort(_(b'unknown action: %s') % action)
4924 4936
4925 4937 if batchedcommands is not None:
4926 4938 raise error.Abort(_(b'unclosed "batchbegin" request'))
4927 4939
4928 4940 if peer:
4929 4941 peer.close()
4930 4942
4931 4943 if proc:
4932 4944 proc.kill()
@@ -1,551 +1,563 b''
1 1 # repair.py - functions for repository repair for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 # Copyright 2007 Olivia Mackall
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from __future__ import absolute_import
10 10
11 11 import errno
12 12
13 13 from .i18n import _
14 14 from .node import (
15 15 hex,
16 16 short,
17 17 )
18 18 from . import (
19 19 bundle2,
20 20 changegroup,
21 21 discovery,
22 22 error,
23 23 exchange,
24 24 obsolete,
25 25 obsutil,
26 26 pathutil,
27 27 phases,
28 28 pycompat,
29 29 requirements,
30 30 scmutil,
31 31 util,
32 32 )
33 33 from .utils import (
34 34 hashutil,
35 35 stringutil,
36 36 urlutil,
37 37 )
38 38
39 39
def backupbundle(
    repo, bases, heads, node, suffix, compress=True, obsolescence=True
):
    """Create a backup bundle containing the specified revisions.

    The bundle is written under the repository's ``strip-backup``
    directory; its name embeds ``node``, ``suffix`` and a hash of all
    bundled changesets so distinct backups never collide.
    Returns the path of the written bundle.
    """
    vfs = repo.vfs
    backupdir = b"strip-backup"
    if not vfs.isdir(backupdir):
        vfs.mkdir(backupdir)

    # Include a hash of all the bundled nodes in the filename for uniqueness.
    hexhashes = sorted(
        ctx.hex() for ctx in repo.set(b'%ln::%ln', bases, heads)
    )
    digest = hashutil.sha1(b''.join(hexhashes)).digest()
    name = b"%s/%s-%s-%s.hg" % (
        backupdir,
        short(node),
        hex(digest[:4]),
        suffix,
    )

    # Pick bundle format and compression to match the repo's changegroup
    # version: modern repos get HG20, legacy ones HG10 with/without bzip2.
    comp = None
    cgversion = changegroup.localversion(repo)
    if cgversion == b'01':
        bundletype = b"HG10BZ" if compress else b"HG10UN"
    else:
        bundletype = b"HG20"
        if compress:
            comp = b'BZ'

    outgoing = discovery.outgoing(repo, missingroots=bases, ancestorsof=heads)
    contentopts = {
        b'cg.version': cgversion,
        b'obsolescence': obsolescence,
        b'phases': True,
    }
    return bundle2.writenewbundle(
        repo.ui,
        repo,
        b'strip',
        name,
        bundletype,
        outgoing,
        contentopts,
        vfs,
        compression=comp,
    )
89 89
90 90
def _collectfiles(repo, striprev):
    """Return the sorted list of files touched at or after ``striprev``."""
    return sorted(
        {
            fname
            for rev in pycompat.xrange(striprev, len(repo))
            for fname in repo[rev].files()
        }
    )
99 99
100 100
def _collectrevlog(revlog, striprev):
    """Return the linkrevs of revisions broken by stripping at ``striprev``."""
    _unused, brokenset = revlog.getstrippoint(striprev)
    linkrevs = []
    for rev in brokenset:
        linkrevs.append(revlog.linkrev(rev))
    return linkrevs
104 104
105 105
def _collectbrokencsets(repo, files, striprev):
    """return the changesets which will be broken by the truncation"""
    broken = set()

    # Manifest revlogs first, then every filelog named in ``files``.
    for mrl in manifestrevlogs(repo):
        broken.update(_collectrevlog(mrl, striprev))
    for fname in files:
        broken.update(_collectrevlog(repo.file(fname), striprev))

    return broken
116 116
117 117
def strip(ui, repo, nodelist, backup=True, topic=b'backup'):
    """Strip the changesets in ``nodelist`` (and their descendants).

    Revisions above the minimum stripped revision that are NOT descendants
    of the stripped set are saved to a temporary bundle and re-applied
    after the store is truncated, so only the intended changesets vanish.

    ``backup`` controls whether a backup bundle is written first; the
    legacy string values b'none'/b'strip' are treated as False.  ``topic``
    names the backup bundle.  Returns the backup file path, or None when
    no backup was made.
    """
    # This function requires the caller to lock the repo, but it operates
    # within a transaction of its own, and thus requires there to be no current
    # transaction when it is called.
    if repo.currenttransaction() is not None:
        raise error.ProgrammingError(b'cannot strip from inside a transaction')

    # Simple way to maintain backwards compatibility for this
    # argument.
    if backup in [b'none', b'strip']:
        backup = False

    repo = repo.unfiltered()
    repo.destroying()
    vfs = repo.vfs
    # load bookmark before changelog to avoid side effect from outdated
    # changelog (see repo._refreshchangelog)
    repo._bookmarks
    cl = repo.changelog

    # TODO handle undo of merge sets
    if isinstance(nodelist, bytes):
        nodelist = [nodelist]
    striplist = [cl.rev(node) for node in nodelist]
    striprev = min(striplist)

    files = _collectfiles(repo, striprev)
    saverevs = _collectbrokencsets(repo, files, striprev)

    # Some revisions with rev > striprev may not be descendants of striprev.
    # We have to find these revisions and put them in a bundle, so that
    # we can restore them after the truncations.
    # To create the bundle we use repo.changegroupsubset which requires
    # the list of heads and bases of the set of interesting revisions.
    # (head = revision in the set that has no descendant in the set;
    #  base = revision in the set that has no ancestor in the set)
    tostrip = set(striplist)
    saveheads = set(saverevs)
    for r in cl.revs(start=striprev + 1):
        if any(p in tostrip for p in cl.parentrevs(r)):
            tostrip.add(r)

        if r not in tostrip:
            saverevs.add(r)
            saveheads.difference_update(cl.parentrevs(r))
            saveheads.add(r)
    saveheads = [cl.node(r) for r in saveheads]

    # compute base nodes
    if saverevs:
        descendants = set(cl.descendants(saverevs))
        saverevs.difference_update(descendants)
    savebases = [cl.node(r) for r in saverevs]
    stripbases = [cl.node(r) for r in tostrip]

    stripobsidx = obsmarkers = ()
    if repo.ui.configbool(b'devel', b'strip-obsmarkers'):
        obsmarkers = obsutil.exclusivemarkers(repo, stripbases)
    if obsmarkers:
        stripobsidx = [
            i for i, m in enumerate(repo.obsstore) if m in obsmarkers
        ]

    newbmtarget, updatebm = _bookmarkmovements(repo, tostrip)

    backupfile = None
    node = nodelist[-1]
    if backup:
        backupfile = _createstripbackup(repo, stripbases, node, topic)
    # create a changegroup for all the branches we need to keep
    tmpbundlefile = None
    if saveheads:
        # do not compress temporary bundle if we remove it from disk later
        #
        # We do not include obsolescence, it might re-introduce prune markers
        # we are trying to strip.  This is harmless since the stripped markers
        # are already backed up and we did not touch the markers for the
        # saved changesets.
        tmpbundlefile = backupbundle(
            repo,
            savebases,
            saveheads,
            node,
            b'temp',
            compress=False,
            obsolescence=False,
        )

    with ui.uninterruptible():
        try:
            with repo.transaction(b"strip") as tr:
                # TODO this code violates the interface abstraction of the
                # transaction and makes assumptions that file storage is
                # using append-only files. We'll need some kind of storage
                # API to handle stripping for us.
                oldfiles = set(tr._offsetmap.keys())
                oldfiles.update(tr._newfiles)

                tr.startgroup()
                cl.strip(striprev, tr)
                stripmanifest(repo, striprev, tr, files)

                for fn in files:
                    repo.file(fn).strip(striprev, tr)
                tr.endgroup()

                entries = tr.readjournal()

                # Truncate every file touched by this transaction back to
                # its pre-transaction offset; files truncated to zero are
                # gone from the store entirely.
                for file, troffset in entries:
                    if file in oldfiles:
                        continue
                    with repo.svfs(file, b'a', checkambig=True) as fp:
                        fp.truncate(troffset)
                    if troffset == 0:
                        repo.store.markremoved(file)

                deleteobsmarkers(repo.obsstore, stripobsidx)
                del repo.obsstore
                repo.invalidatevolatilesets()
                repo._phasecache.filterunknown(repo)

            if tmpbundlefile:
                ui.note(_(b"adding branch\n"))
                f = vfs.open(tmpbundlefile, b"rb")
                gen = exchange.readbundle(ui, f, tmpbundlefile, vfs)
                # silence internal shuffling chatter
                maybe_silent = (
                    repo.ui.silent()
                    if not repo.ui.verbose
                    else util.nullcontextmanager()
                )
                with maybe_silent:
                    tmpbundleurl = b'bundle:' + vfs.join(tmpbundlefile)
                    txnname = b'strip'
                    if not isinstance(gen, bundle2.unbundle20):
                        txnname = b"strip\n%s" % urlutil.hidepassword(
                            tmpbundleurl
                        )
                    with repo.transaction(txnname) as tr:
                        bundle2.applybundle(
                            repo, gen, tr, source=b'strip', url=tmpbundleurl
                        )
                f.close()

            with repo.transaction(b'repair') as tr:
                bmchanges = [(m, repo[newbmtarget].node()) for m in updatebm]
                repo._bookmarks.applychanges(repo, tr, bmchanges)

            # remove undo files
            for undovfs, undofile in repo.undofiles():
                try:
                    undovfs.unlink(undofile)
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        ui.warn(
                            _(b'error removing %s: %s\n')
                            % (
                                undovfs.join(undofile),
                                stringutil.forcebytestr(e),
                            )
                        )

        except:  # re-raises
            if backupfile:
                ui.warn(
                    _(b"strip failed, backup bundle stored in '%s'\n")
                    % vfs.join(backupfile)
                )
            if tmpbundlefile:
                ui.warn(
                    _(b"strip failed, unrecovered changes stored in '%s'\n")
                    % vfs.join(tmpbundlefile)
                )
                ui.warn(
                    _(
                        b"(fix the problem, then recover the changesets with "
                        b"\"hg unbundle '%s'\")\n"
                    )
                    % vfs.join(tmpbundlefile)
                )
            raise
        else:
            if tmpbundlefile:
                # Remove temporary bundle only if there were no exceptions
                vfs.unlink(tmpbundlefile)

    repo.destroyed()
    # return the backup file path (or None if 'backup' was False) so
    # extensions can use it
    return backupfile
308 308
309 309
def softstrip(ui, repo, nodelist, backup=True, topic=b'backup'):
    """perform a "soft" strip using the archived phase"""
    # A soft strip moves revisions to the archived phase instead of
    # rewriting the store.
    targets = [ctx.node() for ctx in repo.set(b'sort(%ln::)', nodelist)]
    if not targets:
        return None

    backupfile = None
    if backup:
        backupfile = _createstripbackup(repo, targets, targets[0], topic)

    newbmtarget, updatebm = _bookmarkmovements(repo, targets)
    with repo.transaction(b'strip') as tr:
        phases.retractboundary(repo, tr, phases.archived, targets)
        changes = [(mark, repo[newbmtarget].node()) for mark in updatebm]
        repo._bookmarks.applychanges(repo, tr, changes)
    return backupfile
327 327
328 328
def _bookmarkmovements(repo, tostrip):
    """Compute which bookmarks must move once ``tostrip`` is gone.

    Returns ``(newbmtarget, updatebm)``: ``updatebm`` lists the bookmarks
    currently pointing into ``tostrip``; ``newbmtarget`` is the node they
    should be repointed to (b'.' when no surviving parent exists, None
    when nothing has to move).
    """
    bm = repo._bookmarks
    updatebm = [name for name in bm if repo[bm[name]].rev() in tostrip]

    # Only compute a target when a bookmark actually has to move.
    newbmtarget = None
    if updatebm:
        # For a set s, max(parents(s) - s) is the same as max(heads(::s - s)),
        # but is much faster
        candidates = repo.revs(b'max(parents(%ld) - (%ld))', tostrip, tostrip)
        if candidates:
            newbmtarget = repo[candidates.first()].node()
        else:
            newbmtarget = b'.'
    return newbmtarget, updatebm
349 349
350 350
def _createstripbackup(repo, stripbases, node, topic):
    """Write a backup bundle for the changesets about to be stripped.

    Reports the location to the user and the log; returns the bundle path.
    """
    backupfile = backupbundle(
        repo, stripbases, repo.changelog.heads(), node, topic
    )
    location = repo.vfs.join(backupfile)
    repo.ui.status(_(b"saved backup bundle to %s\n") % location)
    repo.ui.log(b"backupbundle", b"saved backup bundle to %s\n", location)
    return backupfile
361 361
362 362
def safestriproots(ui, repo, nodes):
    """return list of roots of nodes where descendants are covered by nodes"""
    torev = repo.unfiltered().changelog.rev
    wanted = {torev(n) for n in nodes}
    # tostrip = wanted - unsafe = wanted - ancestors(orphaned)
    # orphaned = affected - wanted
    # affected = descendants(roots(wanted))
    # wanted = revs
    revset = b'%ld - ( ::( (roots(%ld):: and not _phase(%s)) -%ld) )'
    tostrip = set(repo.revs(revset, wanted, wanted, phases.internal, wanted))

    skipped = wanted - tostrip
    if skipped:
        names = b', '.join(sorted(short(repo[r].node()) for r in skipped))
        ui.warn(
            _(b'warning: orphaned descendants detected, not stripping %s\n')
            % names
        )
    return [ctx.node() for ctx in repo.set(b'roots(%ld)', tostrip)]
381 381
382 382
class stripcallback(object):
    """Transaction postclose callback that strips accumulated nodes.

    Nodes are queued with addnodes() while the transaction is open; when
    the transaction closes, the safe roots of the queued nodes are
    stripped (with backup if requested).
    """

    def __init__(self, ui, repo, backup, topic):
        self.ui = ui
        self.repo = repo
        self.backup = backup
        # fall back to the conventional backup topic name
        self.topic = topic or b'backup'
        self.nodelist = []

    def addnodes(self, nodes):
        """Queue additional nodes for stripping."""
        self.nodelist.extend(nodes)

    def __call__(self, tr):
        toremove = safestriproots(self.ui, self.repo, self.nodelist)
        if not toremove:
            return
        strip(self.ui, self.repo, toremove, self.backup, self.topic)
400 400
401 401
def delayedstrip(ui, repo, nodelist, topic=None, backup=True):
    """like strip, but works inside transaction and won't strip irrelevant revs

    nodelist must explicitly contain all descendants. Otherwise a warning will
    be printed that some nodes are not stripped.

    Will do a backup if `backup` is True. The last non-None "topic" will be
    used as the backup topic name. The default backup topic name is "backup".
    """
    tr = repo.currenttransaction()
    if not tr:
        # No transaction in flight: strip immediately.
        return strip(
            ui, repo, safestriproots(ui, repo, nodelist), backup=backup, topic=topic
        )

    # transaction postclose callbacks are called in alphabet order.
    # use '\xff' as prefix so we are likely to be called last.
    key = b'\xffstrip'
    callback = tr.getpostclose(key)
    if callback is None:
        callback = stripcallback(ui, repo, backup=backup, topic=topic)
        tr.addpostclose(key, callback)
    if topic:
        callback.topic = topic
    callback.addnodes(nodelist)
424 424
425 425
def stripmanifest(repo, striprev, tr, files):
    """Strip revision ``striprev`` and later from every manifest revlog."""
    for mrl in manifestrevlogs(repo):
        mrl.strip(striprev, tr)
429 429
430 430
def manifestrevlogs(repo):
    """Yield every manifest revlog: the root one, then tree manifests."""
    yield repo.manifestlog.getstorage(b'')
    if not scmutil.istreemanifest(repo):
        # This logic is safe if treemanifest isn't enabled, but also
        # pointless, so we skip it if treemanifest isn't enabled.
        return
    for t, unencoded, encoded, size in repo.store.datafiles():
        if unencoded.startswith(b'meta/') and unencoded.endswith(
            b'00manifest.i'
        ):
            # drop the b'meta/' prefix and b'/00manifest.i' suffix
            yield repo.manifestlog.getstorage(unencoded[5:-12])
442 442
443 443
444 def rebuildfncache(ui, repo):
444 def rebuildfncache(ui, repo, only_data=False):
445 445 """Rebuilds the fncache file from repo history.
446 446
447 447 Missing entries will be added. Extra entries will be removed.
448 448 """
449 449 repo = repo.unfiltered()
450 450
451 451 if requirements.FNCACHE_REQUIREMENT not in repo.requirements:
452 452 ui.warn(
453 453 _(
454 454 b'(not rebuilding fncache because repository does not '
455 455 b'support fncache)\n'
456 456 )
457 457 )
458 458 return
459 459
460 460 with repo.lock():
461 461 fnc = repo.store.fncache
462 462 fnc.ensureloaded(warn=ui.warn)
463 463
464 464 oldentries = set(fnc.entries)
465 465 newentries = set()
466 466 seenfiles = set()
467 467
468 progress = ui.makeprogress(
469 _(b'rebuilding'), unit=_(b'changesets'), total=len(repo)
470 )
471 for rev in repo:
472 progress.update(rev)
468 if only_data:
469 # Trust the listing of .i from the fncache, but not the .d. This is
470 # much faster, because we only need to stat every possible .d files,
471 # instead of reading the full changelog
472 for f in fnc:
473 if f[:5] == b'data/' and f[-2:] == b'.i':
474 seenfiles.add(f[5:-2])
475 newentries.add(f)
476 dataf = f[:-2] + b'.d'
477 if repo.store._exists(dataf):
478 newentries.add(dataf)
479 else:
480 progress = ui.makeprogress(
481 _(b'rebuilding'), unit=_(b'changesets'), total=len(repo)
482 )
483 for rev in repo:
484 progress.update(rev)
473 485
474 ctx = repo[rev]
475 for f in ctx.files():
476 # This is to minimize I/O.
477 if f in seenfiles:
478 continue
479 seenfiles.add(f)
486 ctx = repo[rev]
487 for f in ctx.files():
488 # This is to minimize I/O.
489 if f in seenfiles:
490 continue
491 seenfiles.add(f)
480 492
481 i = b'data/%s.i' % f
482 d = b'data/%s.d' % f
493 i = b'data/%s.i' % f
494 d = b'data/%s.d' % f
483 495
484 if repo.store._exists(i):
485 newentries.add(i)
486 if repo.store._exists(d):
487 newentries.add(d)
496 if repo.store._exists(i):
497 newentries.add(i)
498 if repo.store._exists(d):
499 newentries.add(d)
488 500
489 progress.complete()
501 progress.complete()
490 502
491 503 if requirements.TREEMANIFEST_REQUIREMENT in repo.requirements:
492 504 # This logic is safe if treemanifest isn't enabled, but also
493 505 # pointless, so we skip it if treemanifest isn't enabled.
494 506 for dir in pathutil.dirs(seenfiles):
495 507 i = b'meta/%s/00manifest.i' % dir
496 508 d = b'meta/%s/00manifest.d' % dir
497 509
498 510 if repo.store._exists(i):
499 511 newentries.add(i)
500 512 if repo.store._exists(d):
501 513 newentries.add(d)
502 514
503 515 addcount = len(newentries - oldentries)
504 516 removecount = len(oldentries - newentries)
505 517 for p in sorted(oldentries - newentries):
506 518 ui.write(_(b'removing %s\n') % p)
507 519 for p in sorted(newentries - oldentries):
508 520 ui.write(_(b'adding %s\n') % p)
509 521
510 522 if addcount or removecount:
511 523 ui.write(
512 524 _(b'%d items added, %d removed from fncache\n')
513 525 % (addcount, removecount)
514 526 )
515 527 fnc.entries = newentries
516 528 fnc._dirty = True
517 529
518 530 with repo.transaction(b'fncache') as tr:
519 531 fnc.write(tr)
520 532 else:
521 533 ui.write(_(b'fncache already up to date\n'))
522 534
523 535
def deleteobsmarkers(obsstore, indices):
    """Delete some obsmarkers from obsstore and return how many were deleted

    'indices' is a list of ints which are the indices
    of the markers to be deleted.

    Every invocation of this function completely rewrites the obsstore file,
    skipping the markers we want to be removed. The new temporary file is
    created, remaining markers are written there and on .close() this file
    gets atomically renamed to obsstore, thus guaranteeing consistency."""
    if not indices:
        # we don't want to rewrite the obsstore with the same content
        return

    kept = []
    deleted = 0
    for idx, marker in enumerate(obsstore._all):
        if idx in indices:
            deleted += 1
        else:
            kept.append(marker)

    # Rewrite the store atomically with only the surviving markers.
    newobsstorefile = obsstore.svfs(b'obsstore', b'w', atomictemp=True)
    for data in obsolete.encodemarkers(kept, True, obsstore._version):
        newobsstorefile.write(data)
    newobsstorefile.close()
    return deleted
@@ -1,447 +1,447 b''
1 1 Show all commands except debug commands
2 2 $ hg debugcomplete
3 3 abort
4 4 add
5 5 addremove
6 6 annotate
7 7 archive
8 8 backout
9 9 bisect
10 10 bookmarks
11 11 branch
12 12 branches
13 13 bundle
14 14 cat
15 15 clone
16 16 commit
17 17 config
18 18 continue
19 19 copy
20 20 diff
21 21 export
22 22 files
23 23 forget
24 24 graft
25 25 grep
26 26 heads
27 27 help
28 28 identify
29 29 import
30 30 incoming
31 31 init
32 32 locate
33 33 log
34 34 manifest
35 35 merge
36 36 outgoing
37 37 parents
38 38 paths
39 39 phase
40 40 pull
41 41 purge
42 42 push
43 43 recover
44 44 remove
45 45 rename
46 46 resolve
47 47 revert
48 48 rollback
49 49 root
50 50 serve
51 51 shelve
52 52 status
53 53 summary
54 54 tag
55 55 tags
56 56 tip
57 57 unbundle
58 58 unshelve
59 59 update
60 60 verify
61 61 version
62 62
63 63 Show all commands that start with "a"
64 64 $ hg debugcomplete a
65 65 abort
66 66 add
67 67 addremove
68 68 annotate
69 69 archive
70 70
71 71 Do not show debug commands if there are other candidates
72 72 $ hg debugcomplete d
73 73 diff
74 74
75 75 Show debug commands if there are no other candidates
76 76 $ hg debugcomplete debug
77 77 debug-repair-issue6528
78 78 debugancestor
79 79 debugantivirusrunning
80 80 debugapplystreamclonebundle
81 81 debugbackupbundle
82 82 debugbuilddag
83 83 debugbundle
84 84 debugcapabilities
85 85 debugchangedfiles
86 86 debugcheckstate
87 87 debugcolor
88 88 debugcommands
89 89 debugcomplete
90 90 debugconfig
91 91 debugcreatestreamclonebundle
92 92 debugdag
93 93 debugdata
94 94 debugdate
95 95 debugdeltachain
96 96 debugdirstate
97 97 debugdirstateignorepatternshash
98 98 debugdiscovery
99 99 debugdownload
100 100 debugextensions
101 101 debugfileset
102 102 debugformat
103 103 debugfsinfo
104 104 debuggetbundle
105 105 debugignore
106 106 debugindex
107 107 debugindexdot
108 108 debugindexstats
109 109 debuginstall
110 110 debugknown
111 111 debuglabelcomplete
112 112 debuglocks
113 113 debugmanifestfulltextcache
114 114 debugmergestate
115 115 debugnamecomplete
116 116 debugnodemap
117 117 debugobsolete
118 118 debugp1copies
119 119 debugp2copies
120 120 debugpathcomplete
121 121 debugpathcopies
122 122 debugpeer
123 123 debugpickmergetool
124 124 debugpushkey
125 125 debugpvec
126 126 debugrebuilddirstate
127 127 debugrebuildfncache
128 128 debugrename
129 129 debugrequires
130 130 debugrevlog
131 131 debugrevlogindex
132 132 debugrevspec
133 133 debugserve
134 134 debugsetparents
135 135 debugshell
136 136 debugsidedata
137 137 debugssl
138 138 debugstrip
139 139 debugsub
140 140 debugsuccessorssets
141 141 debugtagscache
142 142 debugtemplate
143 143 debuguigetpass
144 144 debuguiprompt
145 145 debugupdatecaches
146 146 debugupgraderepo
147 147 debugwalk
148 148 debugwhyunstable
149 149 debugwireargs
150 150 debugwireproto
151 151
152 152 Do not show the alias of a debug command if there are other candidates
153 153 (this should hide rawcommit)
154 154 $ hg debugcomplete r
155 155 recover
156 156 remove
157 157 rename
158 158 resolve
159 159 revert
160 160 rollback
161 161 root
162 162 Show the alias of a debug command if there are no other candidates
163 163 $ hg debugcomplete rawc
164 164
165 165
166 166 Show the global options
167 167 $ hg debugcomplete --options | sort
168 168 --color
169 169 --config
170 170 --cwd
171 171 --debug
172 172 --debugger
173 173 --encoding
174 174 --encodingmode
175 175 --help
176 176 --hidden
177 177 --noninteractive
178 178 --pager
179 179 --profile
180 180 --quiet
181 181 --repository
182 182 --time
183 183 --traceback
184 184 --verbose
185 185 --version
186 186 -R
187 187 -h
188 188 -q
189 189 -v
190 190 -y
191 191
192 192 Show the options for the "serve" command
193 193 $ hg debugcomplete --options serve | sort
194 194 --accesslog
195 195 --address
196 196 --certificate
197 197 --cmdserver
198 198 --color
199 199 --config
200 200 --cwd
201 201 --daemon
202 202 --daemon-postexec
203 203 --debug
204 204 --debugger
205 205 --encoding
206 206 --encodingmode
207 207 --errorlog
208 208 --help
209 209 --hidden
210 210 --ipv6
211 211 --name
212 212 --noninteractive
213 213 --pager
214 214 --pid-file
215 215 --port
216 216 --prefix
217 217 --print-url
218 218 --profile
219 219 --quiet
220 220 --repository
221 221 --stdio
222 222 --style
223 223 --subrepos
224 224 --templates
225 225 --time
226 226 --traceback
227 227 --verbose
228 228 --version
229 229 --web-conf
230 230 -6
231 231 -A
232 232 -E
233 233 -R
234 234 -S
235 235 -a
236 236 -d
237 237 -h
238 238 -n
239 239 -p
240 240 -q
241 241 -t
242 242 -v
243 243 -y
244 244
245 245 Show an error if we use --options with an ambiguous abbreviation
246 246 $ hg debugcomplete --options s
247 247 hg: command 's' is ambiguous:
248 248 serve shelve showconfig status summary
249 249 [10]
250 250
251 251 Show all commands + options
252 252 $ hg debugcommands
253 253 abort: dry-run
254 254 add: include, exclude, subrepos, dry-run
255 255 addremove: similarity, subrepos, include, exclude, dry-run
256 256 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
257 257 archive: no-decode, prefix, rev, type, subrepos, include, exclude
258 258 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
259 259 bisect: reset, good, bad, skip, extend, command, noupdate
260 260 bookmarks: force, rev, delete, rename, inactive, list, template
261 261 branch: force, clean, rev
262 262 branches: active, closed, rev, template
263 263 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
264 264 cat: output, rev, decode, include, exclude, template
265 265 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
266 266 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
267 267 config: untrusted, exp-all-known, edit, local, source, shared, non-shared, global, template
268 268 continue: dry-run
269 269 copy: forget, after, at-rev, force, include, exclude, dry-run
270 270 debug-repair-issue6528: to-report, from-report, paranoid, dry-run
271 271 debugancestor:
272 272 debugantivirusrunning:
273 273 debugapplystreamclonebundle:
274 274 debugbackupbundle: recover, patch, git, limit, no-merges, stat, graph, style, template
275 275 debugbuilddag: mergeable-file, overwritten-file, new-file
276 276 debugbundle: all, part-type, spec
277 277 debugcapabilities:
278 278 debugchangedfiles: compute
279 279 debugcheckstate:
280 280 debugcolor: style
281 281 debugcommands:
282 282 debugcomplete: options
283 283 debugcreatestreamclonebundle:
284 284 debugdag: tags, branches, dots, spaces
285 285 debugdata: changelog, manifest, dir
286 286 debugdate: extended
287 287 debugdeltachain: changelog, manifest, dir, template
288 288 debugdirstateignorepatternshash:
289 289 debugdirstate: nodates, dates, datesort, all
290 290 debugdiscovery: old, nonheads, rev, seed, local-as-revs, remote-as-revs, ssh, remotecmd, insecure, template
291 291 debugdownload: output
292 292 debugextensions: template
293 293 debugfileset: rev, all-files, show-matcher, show-stage
294 294 debugformat: template
295 295 debugfsinfo:
296 296 debuggetbundle: head, common, type
297 297 debugignore:
298 298 debugindex: changelog, manifest, dir, template
299 299 debugindexdot: changelog, manifest, dir
300 300 debugindexstats:
301 301 debuginstall: template
302 302 debugknown:
303 303 debuglabelcomplete:
304 304 debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
305 305 debugmanifestfulltextcache: clear, add
306 306 debugmergestate: style, template
307 307 debugnamecomplete:
308 308 debugnodemap: dump-new, dump-disk, check, metadata
309 309 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
310 310 debugp1copies: rev
311 311 debugp2copies: rev
312 312 debugpathcomplete: full, normal, added, removed
313 313 debugpathcopies: include, exclude
314 314 debugpeer:
315 315 debugpickmergetool: rev, changedelete, include, exclude, tool
316 316 debugpushkey:
317 317 debugpvec:
318 318 debugrebuilddirstate: rev, minimal
319 debugrebuildfncache:
319 debugrebuildfncache: only-data
320 320 debugrename: rev
321 321 debugrequires:
322 322 debugrevlog: changelog, manifest, dir, dump
323 323 debugrevlogindex: changelog, manifest, dir, format
324 324 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
325 325 debugserve: sshstdio, logiofd, logiofile
326 326 debugsetparents:
327 327 debugshell:
328 328 debugsidedata: changelog, manifest, dir
329 329 debugssl:
330 330 debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
331 331 debugsub: rev
332 332 debugsuccessorssets: closest
333 333 debugtagscache:
334 334 debugtemplate: rev, define
335 335 debuguigetpass: prompt
336 336 debuguiprompt: prompt
337 337 debugupdatecaches:
338 338 debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
339 339 debugwalk: include, exclude
340 340 debugwhyunstable:
341 341 debugwireargs: three, four, five, ssh, remotecmd, insecure
342 342 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
343 343 diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
344 344 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
345 345 files: rev, print0, include, exclude, template, subrepos
346 346 forget: interactive, include, exclude, dry-run
347 347 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
348 348 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
349 349 heads: rev, topo, active, closed, style, template
350 350 help: extension, command, keyword, system
351 351 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
352 352 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
353 353 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
354 354 init: ssh, remotecmd, insecure
355 355 locate: rev, print0, fullpath, include, exclude
356 356 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
357 357 manifest: rev, all, template
358 358 merge: force, rev, preview, abort, tool
359 359 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
360 360 parents: rev, style, template
361 361 paths: template
362 362 phase: public, draft, secret, force, rev
363 363 pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
364 364 purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
365 365 push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
366 366 recover: verify
367 367 remove: after, force, subrepos, include, exclude, dry-run
368 368 rename: forget, after, at-rev, force, include, exclude, dry-run
369 369 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
370 370 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
371 371 rollback: dry-run, force
372 372 root: template
373 373 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
374 374 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
375 375 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
376 376 summary: remote
377 377 tag: force, local, rev, remove, edit, message, date, user
378 378 tags: template
379 379 tip: patch, git, style, template
380 380 unbundle: update
381 381 unshelve: abort, continue, interactive, keep, name, tool, date
382 382 update: clean, check, merge, date, rev, tool
383 383 verify: full
384 384 version: template
385 385
386 386 $ hg init a
387 387 $ cd a
388 388 $ echo fee > fee
389 389 $ hg ci -q -Amfee
390 390 $ hg tag fee
391 391 $ mkdir fie
392 392 $ echo dead > fie/dead
393 393 $ echo live > fie/live
394 394 $ hg bookmark fo
395 395 $ hg branch -q fie
396 396 $ hg ci -q -Amfie
397 397 $ echo fo > fo
398 398 $ hg branch -qf default
399 399 $ hg ci -q -Amfo
400 400 $ echo Fum > Fum
401 401 $ hg ci -q -AmFum
402 402 $ hg bookmark Fum
403 403
404 404 Test debugpathcomplete
405 405
406 406 $ hg debugpathcomplete f
407 407 fee
408 408 fie
409 409 fo
410 410 $ hg debugpathcomplete -f f
411 411 fee
412 412 fie/dead
413 413 fie/live
414 414 fo
415 415
416 416 $ hg rm Fum
417 417 $ hg debugpathcomplete -r F
418 418 Fum
419 419
420 420 Test debugnamecomplete
421 421
422 422 $ hg debugnamecomplete
423 423 Fum
424 424 default
425 425 fee
426 426 fie
427 427 fo
428 428 tip
429 429 $ hg debugnamecomplete f
430 430 fee
431 431 fie
432 432 fo
433 433
434 434 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
435 435 used for completions in some shells.
436 436
437 437 $ hg debuglabelcomplete
438 438 Fum
439 439 default
440 440 fee
441 441 fie
442 442 fo
443 443 tip
444 444 $ hg debuglabelcomplete f
445 445 fee
446 446 fie
447 447 fo
@@ -1,168 +1,172 b''
1 1 Test correctness of revlog inline -> non-inline transition
2 2 ----------------------------------------------------------
3 3
4 4 Helper extension to intercept renames.
5 5
6 6 $ cat > $TESTTMP/intercept_rename.py << EOF
7 7 > import os
8 8 > import sys
9 9 > from mercurial import extensions, util
10 10 >
11 11 > def extsetup(ui):
12 12 > def close(orig, *args, **kwargs):
13 13 > path = util.normpath(args[0]._atomictempfile__name)
14 14 > if path.endswith(b'/.hg/store/data/file.i'):
15 15 > os._exit(80)
16 16 > return orig(*args, **kwargs)
17 17 > extensions.wrapfunction(util.atomictempfile, 'close', close)
18 18 > EOF
19 19
20 20 Test offset computation to correctly factor in the index entries themselves.
21 21 Also test that the new data file has the correct size if the transaction is aborted
22 22 after the index has been replaced.
23 23
24 24 Test repo has one small, one moderate and one big change. The clone has
25 25 the small and moderate change and will transition to non-inline storage when
26 26 adding the big change.
27 27
28 28 $ hg init troffset-computation --config format.revlog-compression=none
29 29 $ cd troffset-computation
30 30 $ printf '%20d' '1' > file
31 31 $ hg commit -Aqm_
32 32 $ printf '%1024d' '1' > file
33 33 $ hg commit -Aqm_
34 34 $ dd if=/dev/zero of=file bs=1k count=128 > /dev/null 2>&1
35 35 $ hg commit -Aqm_
36 36 $ cd ..
37 37
38 38 $ hg clone -r 1 troffset-computation troffset-computation-copy --config format.revlog-compression=none -q
39 39 $ cd troffset-computation-copy
40 40
41 41 Reference size:
42 42
43 43 $ f -s .hg/store/data/file*
44 44 .hg/store/data/file.i: size=1174
45 45
46 46 $ cat > .hg/hgrc <<EOF
47 47 > [hooks]
48 48 > pretxnchangegroup = python:$TESTDIR/helper-killhook.py:killme
49 49 > EOF
50 50 #if chg
51 51 $ hg pull ../troffset-computation
52 52 pulling from ../troffset-computation
53 53 [255]
54 54 #else
55 55 $ hg pull ../troffset-computation
56 56 pulling from ../troffset-computation
57 57 [80]
58 58 #endif
59 59 $ cat .hg/store/journal | tr -s '\000' ' ' | grep data/file | tail -1
60 60 data/file.i 128
61 61
62 62 The first file.i entry should match the size above.
63 63 The first file.d entry is the temporary record during the split,
64 64 the second entry after the split happened. The sum of the second file.d
65 65 and the second file.i entry should match the first file.i entry.
66 66
67 67 $ cat .hg/store/journal | tr -s '\000' ' ' | grep data/file
68 68 data/file.i 1174
69 69 data/file.d 0
70 70 data/file.d 1046
71 71 data/file.i 128
72 72 $ hg recover
73 73 rolling back interrupted transaction
74 74 (verify step skipped, run `hg verify` to check your repository content)
75 75 $ f -s .hg/store/data/file*
76 76 .hg/store/data/file.d: size=1046
77 77 .hg/store/data/file.i: size=128
78 78 $ hg tip
79 79 changeset: 1:3ce491143aec
80 80 tag: tip
81 81 user: test
82 82 date: Thu Jan 01 00:00:00 1970 +0000
83 83 summary: _
84 84
85 85 $ hg verify -q
86 86 warning: revlog 'data/file.d' not in fncache!
87 87 1 warnings encountered!
88 88 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
89 $ hg debugrebuildfncache --only-data
90 adding data/file.d
91 1 items added, 0 removed from fncache
92 $ hg verify -q
89 93 $ cd ..
90 94
91 95
92 96 Now retry the procedure but intercept the rename of the index and check that
93 97 the journal does not contain the new index size. This demonstrates the edge case
94 98 where the data file is left as garbage.
95 99
96 100 $ hg clone -r 1 troffset-computation troffset-computation-copy2 --config format.revlog-compression=none -q
97 101 $ cd troffset-computation-copy2
98 102 $ cat > .hg/hgrc <<EOF
99 103 > [extensions]
100 104 > intercept_rename = $TESTTMP/intercept_rename.py
101 105 > [hooks]
102 106 > pretxnchangegroup = python:$TESTDIR/helper-killhook.py:killme
103 107 > EOF
104 108 #if chg
105 109 $ hg pull ../troffset-computation
106 110 pulling from ../troffset-computation
107 111 [255]
108 112 #else
109 113 $ hg pull ../troffset-computation
110 114 pulling from ../troffset-computation
111 115 [80]
112 116 #endif
113 117 $ cat .hg/store/journal | tr -s '\000' ' ' | grep data/file
114 118 data/file.i 1174
115 119 data/file.d 0
116 120 data/file.d 1046
117 121
118 122 $ hg recover
119 123 rolling back interrupted transaction
120 124 (verify step skipped, run `hg verify` to check your repository content)
121 125 $ f -s .hg/store/data/file*
122 126 .hg/store/data/file.d: size=1046
123 127 .hg/store/data/file.i: size=1174
124 128 $ hg tip
125 129 changeset: 1:3ce491143aec
126 130 tag: tip
127 131 user: test
128 132 date: Thu Jan 01 00:00:00 1970 +0000
129 133 summary: _
130 134
131 135 $ hg verify -q
132 136 $ cd ..
133 137
134 138
135 139 Repeat the original test but let hg rollback the transaction.
136 140
137 141 $ hg clone -r 1 troffset-computation troffset-computation-copy-rb --config format.revlog-compression=none -q
138 142 $ cd troffset-computation-copy-rb
139 143 $ cat > .hg/hgrc <<EOF
140 144 > [hooks]
141 145 > pretxnchangegroup = false
142 146 > EOF
143 147 $ hg pull ../troffset-computation
144 148 pulling from ../troffset-computation
145 149 searching for changes
146 150 adding changesets
147 151 adding manifests
148 152 adding file changes
149 153 transaction abort!
150 154 rollback completed
151 155 abort: pretxnchangegroup hook exited with status 1
152 156 [40]
153 157 $ f -s .hg/store/data/file*
154 158 .hg/store/data/file.d: size=1046
155 159 .hg/store/data/file.i: size=128
156 160 $ hg tip
157 161 changeset: 1:3ce491143aec
158 162 tag: tip
159 163 user: test
160 164 date: Thu Jan 01 00:00:00 1970 +0000
161 165 summary: _
162 166
163 167 $ hg verify -q
164 168 warning: revlog 'data/file.d' not in fncache!
165 169 1 warnings encountered!
166 170 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
167 171 $ cd ..
168 172
General Comments 0
You need to be logged in to leave comments. Login now