nodemap: add a flag to dump the details of the docket...
Author: marmoute
Changeset: r44806:6614b301 (branch: default)
@@ -1,4351 +1,4364 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import platform
17 17 import random
18 18 import re
19 19 import socket
20 20 import ssl
21 21 import stat
22 22 import string
23 23 import subprocess
24 24 import sys
25 25 import time
26 26
27 27 from .i18n import _
28 28 from .node import (
29 29 bin,
30 30 hex,
31 31 nullhex,
32 32 nullid,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 changegroup,
43 43 cmdutil,
44 44 color,
45 45 context,
46 46 copies,
47 47 dagparser,
48 48 encoding,
49 49 error,
50 50 exchange,
51 51 extensions,
52 52 filemerge,
53 53 filesetlang,
54 54 formatter,
55 55 hg,
56 56 httppeer,
57 57 localrepo,
58 58 lock as lockmod,
59 59 logcmdutil,
60 60 merge as mergemod,
61 61 obsolete,
62 62 obsutil,
63 63 pathutil,
64 64 phases,
65 65 policy,
66 66 pvec,
67 67 pycompat,
68 68 registrar,
69 69 repair,
70 70 revlog,
71 71 revset,
72 72 revsetlang,
73 73 scmutil,
74 74 setdiscovery,
75 75 simplemerge,
76 76 sshpeer,
77 77 sslutil,
78 78 streamclone,
79 79 tags as tagsmod,
80 80 templater,
81 81 treediscovery,
82 82 upgrade,
83 83 url as urlmod,
84 84 util,
85 85 vfs as vfsmod,
86 86 wireprotoframing,
87 87 wireprotoserver,
88 88 wireprotov2peer,
89 89 )
90 90 from .utils import (
91 91 cborutil,
92 92 compression,
93 93 dateutil,
94 94 procutil,
95 95 stringutil,
96 96 )
97 97
98 98 from .revlogutils import (
99 99 deltas as deltautil,
100 100 nodemap,
101 101 )
102 102
103 103 release = lockmod.release
104 104
105 105 command = registrar.command()
106 106
107 107
108 108 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
109 109 def debugancestor(ui, repo, *args):
110 110 """find the ancestor revision of two revisions in a given index"""
111 111 if len(args) == 3:
112 112 index, rev1, rev2 = args
113 113 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
114 114 lookup = r.lookup
115 115 elif len(args) == 2:
116 116 if not repo:
117 117 raise error.Abort(
118 118 _(b'there is no Mercurial repository here (.hg not found)')
119 119 )
120 120 rev1, rev2 = args
121 121 r = repo.changelog
122 122 lookup = repo.lookup
123 123 else:
124 124 raise error.Abort(_(b'either two or three arguments required'))
125 125 a = r.ancestor(lookup(rev1), lookup(rev2))
126 126 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
127 127
128 128
129 129 @command(b'debugapplystreamclonebundle', [], b'FILE')
130 130 def debugapplystreamclonebundle(ui, repo, fname):
131 131 """apply a stream clone bundle file"""
132 132 f = hg.openpath(ui, fname)
133 133 gen = exchange.readbundle(ui, f, fname)
134 134 gen.apply(repo)
135 135
136 136
137 137 @command(
138 138 b'debugbuilddag',
139 139 [
140 140 (
141 141 b'm',
142 142 b'mergeable-file',
143 143 None,
144 144 _(b'add single file mergeable changes'),
145 145 ),
146 146 (
147 147 b'o',
148 148 b'overwritten-file',
149 149 None,
150 150 _(b'add single file all revs overwrite'),
151 151 ),
152 152 (b'n', b'new-file', None, _(b'add new file at each rev')),
153 153 ],
154 154 _(b'[OPTION]... [TEXT]'),
155 155 )
156 156 def debugbuilddag(
157 157 ui,
158 158 repo,
159 159 text=None,
160 160 mergeable_file=False,
161 161 overwritten_file=False,
162 162 new_file=False,
163 163 ):
164 164 """builds a repo with a given DAG from scratch in the current empty repo
165 165
166 166 The description of the DAG is read from stdin if not given on the
167 167 command line.
168 168
169 169 Elements:
170 170
171 171 - "+n" is a linear run of n nodes based on the current default parent
172 172 - "." is a single node based on the current default parent
173 173 - "$" resets the default parent to null (implied at the start);
174 174 otherwise the default parent is always the last node created
175 175 - "<p" sets the default parent to the backref p
176 176 - "*p" is a fork at parent p, which is a backref
177 177 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
178 178 - "/p2" is a merge of the preceding node and p2
179 179 - ":tag" defines a local tag for the preceding node
180 180 - "@branch" sets the named branch for subsequent nodes
181 181 - "#...\\n" is a comment up to the end of the line
182 182
183 183 Whitespace between the above elements is ignored.
184 184
185 185 A backref is either
186 186
187 187 - a number n, which references the node curr-n, where curr is the current
188 188 node, or
189 189 - the name of a local tag you placed earlier using ":tag", or
190 190 - empty to denote the default parent.
191 191
192 192 All string-valued elements are either strictly alphanumeric, or must
193 193 be enclosed in double quotes ("..."), with "\\" as escape character.
194 194 """
195 195
196 196 if text is None:
197 197 ui.status(_(b"reading DAG from stdin\n"))
198 198 text = ui.fin.read()
199 199
200 200 cl = repo.changelog
201 201 if len(cl) > 0:
202 202 raise error.Abort(_(b'repository is not empty'))
203 203
204 204 # determine number of revs in DAG
205 205 total = 0
206 206 for type, data in dagparser.parsedag(text):
207 207 if type == b'n':
208 208 total += 1
209 209
210 210 if mergeable_file:
211 211 linesperrev = 2
212 212 # make a file with k lines per rev
213 213 initialmergedlines = [
214 214 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
215 215 ]
216 216 initialmergedlines.append(b"")
217 217
218 218 tags = []
219 219 progress = ui.makeprogress(
220 220 _(b'building'), unit=_(b'revisions'), total=total
221 221 )
222 222 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
223 223 at = -1
224 224 atbranch = b'default'
225 225 nodeids = []
226 226 id = 0
227 227 progress.update(id)
228 228 for type, data in dagparser.parsedag(text):
229 229 if type == b'n':
230 230 ui.note((b'node %s\n' % pycompat.bytestr(data)))
231 231 id, ps = data
232 232
233 233 files = []
234 234 filecontent = {}
235 235
236 236 p2 = None
237 237 if mergeable_file:
238 238 fn = b"mf"
239 239 p1 = repo[ps[0]]
240 240 if len(ps) > 1:
241 241 p2 = repo[ps[1]]
242 242 pa = p1.ancestor(p2)
243 243 base, local, other = [
244 244 x[fn].data() for x in (pa, p1, p2)
245 245 ]
246 246 m3 = simplemerge.Merge3Text(base, local, other)
247 247 ml = [l.strip() for l in m3.merge_lines()]
248 248 ml.append(b"")
249 249 elif at > 0:
250 250 ml = p1[fn].data().split(b"\n")
251 251 else:
252 252 ml = initialmergedlines
253 253 ml[id * linesperrev] += b" r%i" % id
254 254 mergedtext = b"\n".join(ml)
255 255 files.append(fn)
256 256 filecontent[fn] = mergedtext
257 257
258 258 if overwritten_file:
259 259 fn = b"of"
260 260 files.append(fn)
261 261 filecontent[fn] = b"r%i\n" % id
262 262
263 263 if new_file:
264 264 fn = b"nf%i" % id
265 265 files.append(fn)
266 266 filecontent[fn] = b"r%i\n" % id
267 267 if len(ps) > 1:
268 268 if not p2:
269 269 p2 = repo[ps[1]]
270 270 for fn in p2:
271 271 if fn.startswith(b"nf"):
272 272 files.append(fn)
273 273 filecontent[fn] = p2[fn].data()
274 274
275 275 def fctxfn(repo, cx, path):
276 276 if path in filecontent:
277 277 return context.memfilectx(
278 278 repo, cx, path, filecontent[path]
279 279 )
280 280 return None
281 281
282 282 if len(ps) == 0 or ps[0] < 0:
283 283 pars = [None, None]
284 284 elif len(ps) == 1:
285 285 pars = [nodeids[ps[0]], None]
286 286 else:
287 287 pars = [nodeids[p] for p in ps]
288 288 cx = context.memctx(
289 289 repo,
290 290 pars,
291 291 b"r%i" % id,
292 292 files,
293 293 fctxfn,
294 294 date=(id, 0),
295 295 user=b"debugbuilddag",
296 296 extra={b'branch': atbranch},
297 297 )
298 298 nodeid = repo.commitctx(cx)
299 299 nodeids.append(nodeid)
300 300 at = id
301 301 elif type == b'l':
302 302 id, name = data
303 303 ui.note((b'tag %s\n' % name))
304 304 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
305 305 elif type == b'a':
306 306 ui.note((b'branch %s\n' % data))
307 307 atbranch = data
308 308 progress.update(id)
309 309
310 310 if tags:
311 311 repo.vfs.write(b"localtags", b"".join(tags))
312 312
313 313
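The DAG grammar spelled out in the docstring above is easiest to grasp from a concrete description. Below is a minimal sketch (not part of the upstream file; the helper name and the DAG text are made up) that feeds such a description through dagparser.parsedag(), the same first pass debugbuilddag() uses to count nodes:

def _example_count_dag_nodes(text=b"+2 :base *base /2"):
    """Count the nodes a DAG description would create (illustrative only)."""
    # parsedag() yields (b'n', (id, [parents])) for nodes, (b'l', (id, name))
    # for local tags and (b'a', name) for branch annotations.
    return sum(1 for kind, data in dagparser.parsedag(text) if kind == b'n')

With the example text this returns 4: two linear nodes, one child forked off the "base" tag, and one merge.
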
314 314 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
315 315 indent_string = b' ' * indent
316 316 if all:
317 317 ui.writenoi18n(
318 318 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
319 319 % indent_string
320 320 )
321 321
322 322 def showchunks(named):
323 323 ui.write(b"\n%s%s\n" % (indent_string, named))
324 324 for deltadata in gen.deltaiter():
325 325 node, p1, p2, cs, deltabase, delta, flags = deltadata
326 326 ui.write(
327 327 b"%s%s %s %s %s %s %d\n"
328 328 % (
329 329 indent_string,
330 330 hex(node),
331 331 hex(p1),
332 332 hex(p2),
333 333 hex(cs),
334 334 hex(deltabase),
335 335 len(delta),
336 336 )
337 337 )
338 338
339 339 gen.changelogheader()
340 340 showchunks(b"changelog")
341 341 gen.manifestheader()
342 342 showchunks(b"manifest")
343 343 for chunkdata in iter(gen.filelogheader, {}):
344 344 fname = chunkdata[b'filename']
345 345 showchunks(fname)
346 346 else:
347 347 if isinstance(gen, bundle2.unbundle20):
348 348 raise error.Abort(_(b'use debugbundle2 for this file'))
349 349 gen.changelogheader()
350 350 for deltadata in gen.deltaiter():
351 351 node, p1, p2, cs, deltabase, delta, flags = deltadata
352 352 ui.write(b"%s%s\n" % (indent_string, hex(node)))
353 353
354 354
355 355 def _debugobsmarkers(ui, part, indent=0, **opts):
356 356 """display version and markers contained in 'data'"""
357 357 opts = pycompat.byteskwargs(opts)
358 358 data = part.read()
359 359 indent_string = b' ' * indent
360 360 try:
361 361 version, markers = obsolete._readmarkers(data)
362 362 except error.UnknownVersion as exc:
363 363 msg = b"%sunsupported version: %s (%d bytes)\n"
364 364 msg %= indent_string, exc.version, len(data)
365 365 ui.write(msg)
366 366 else:
367 367 msg = b"%sversion: %d (%d bytes)\n"
368 368 msg %= indent_string, version, len(data)
369 369 ui.write(msg)
370 370 fm = ui.formatter(b'debugobsolete', opts)
371 371 for rawmarker in sorted(markers):
372 372 m = obsutil.marker(None, rawmarker)
373 373 fm.startitem()
374 374 fm.plain(indent_string)
375 375 cmdutil.showmarker(fm, m)
376 376 fm.end()
377 377
378 378
379 379 def _debugphaseheads(ui, data, indent=0):
380 380 """display the phase heads contained in 'data'"""
381 381 indent_string = b' ' * indent
382 382 headsbyphase = phases.binarydecode(data)
383 383 for phase in phases.allphases:
384 384 for head in headsbyphase[phase]:
385 385 ui.write(indent_string)
386 386 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
387 387
388 388
389 389 def _quasirepr(thing):
390 390 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
391 391 return b'{%s}' % (
392 392 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
393 393 )
394 394 return pycompat.bytestr(repr(thing))
395 395
396 396
397 397 def _debugbundle2(ui, gen, all=None, **opts):
398 398 """lists the contents of a bundle2"""
399 399 if not isinstance(gen, bundle2.unbundle20):
400 400 raise error.Abort(_(b'not a bundle2 file'))
401 401 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
402 402 parttypes = opts.get('part_type', [])
403 403 for part in gen.iterparts():
404 404 if parttypes and part.type not in parttypes:
405 405 continue
406 406 msg = b'%s -- %s (mandatory: %r)\n'
407 407 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
408 408 if part.type == b'changegroup':
409 409 version = part.params.get(b'version', b'01')
410 410 cg = changegroup.getunbundler(version, part, b'UN')
411 411 if not ui.quiet:
412 412 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
413 413 if part.type == b'obsmarkers':
414 414 if not ui.quiet:
415 415 _debugobsmarkers(ui, part, indent=4, **opts)
416 416 if part.type == b'phase-heads':
417 417 if not ui.quiet:
418 418 _debugphaseheads(ui, part, indent=4)
419 419
420 420
421 421 @command(
422 422 b'debugbundle',
423 423 [
424 424 (b'a', b'all', None, _(b'show all details')),
425 425 (b'', b'part-type', [], _(b'show only the named part type')),
426 426 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
427 427 ],
428 428 _(b'FILE'),
429 429 norepo=True,
430 430 )
431 431 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
432 432 """lists the contents of a bundle"""
433 433 with hg.openpath(ui, bundlepath) as f:
434 434 if spec:
435 435 spec = exchange.getbundlespec(ui, f)
436 436 ui.write(b'%s\n' % spec)
437 437 return
438 438
439 439 gen = exchange.readbundle(ui, f, bundlepath)
440 440 if isinstance(gen, bundle2.unbundle20):
441 441 return _debugbundle2(ui, gen, all=all, **opts)
442 442 _debugchangegroup(ui, gen, all=all, **opts)
443 443
444 444
445 445 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
446 446 def debugcapabilities(ui, path, **opts):
447 447 """lists the capabilities of a remote peer"""
448 448 opts = pycompat.byteskwargs(opts)
449 449 peer = hg.peer(ui, opts, path)
450 450 caps = peer.capabilities()
451 451 ui.writenoi18n(b'Main capabilities:\n')
452 452 for c in sorted(caps):
453 453 ui.write(b' %s\n' % c)
454 454 b2caps = bundle2.bundle2caps(peer)
455 455 if b2caps:
456 456 ui.writenoi18n(b'Bundle2 capabilities:\n')
457 457 for key, values in sorted(pycompat.iteritems(b2caps)):
458 458 ui.write(b' %s\n' % key)
459 459 for v in values:
460 460 ui.write(b' %s\n' % v)
461 461
462 462
463 463 @command(b'debugcheckstate', [], b'')
464 464 def debugcheckstate(ui, repo):
465 465 """validate the correctness of the current dirstate"""
466 466 parent1, parent2 = repo.dirstate.parents()
467 467 m1 = repo[parent1].manifest()
468 468 m2 = repo[parent2].manifest()
469 469 errors = 0
470 470 for f in repo.dirstate:
471 471 state = repo.dirstate[f]
472 472 if state in b"nr" and f not in m1:
473 473 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
474 474 errors += 1
475 475 if state in b"a" and f in m1:
476 476 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
477 477 errors += 1
478 478 if state in b"m" and f not in m1 and f not in m2:
479 479 ui.warn(
480 480 _(b"%s in state %s, but not in either manifest\n") % (f, state)
481 481 )
482 482 errors += 1
483 483 for f in m1:
484 484 state = repo.dirstate[f]
485 485 if state not in b"nrm":
486 486 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
487 487 errors += 1
488 488 if errors:
489 489 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
490 490 raise error.Abort(errstr)
491 491
492 492
493 493 @command(
494 494 b'debugcolor',
495 495 [(b'', b'style', None, _(b'show all configured styles'))],
496 496 b'hg debugcolor',
497 497 )
498 498 def debugcolor(ui, repo, **opts):
499 499 """show available color, effects or style"""
500 500 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
501 501 if opts.get('style'):
502 502 return _debugdisplaystyle(ui)
503 503 else:
504 504 return _debugdisplaycolor(ui)
505 505
506 506
507 507 def _debugdisplaycolor(ui):
508 508 ui = ui.copy()
509 509 ui._styles.clear()
510 510 for effect in color._activeeffects(ui).keys():
511 511 ui._styles[effect] = effect
512 512 if ui._terminfoparams:
513 513 for k, v in ui.configitems(b'color'):
514 514 if k.startswith(b'color.'):
515 515 ui._styles[k] = k[6:]
516 516 elif k.startswith(b'terminfo.'):
517 517 ui._styles[k] = k[9:]
518 518 ui.write(_(b'available colors:\n'))
519 519 # sort label with a '_' after the other to group '_background' entry.
520 520 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
521 521 for colorname, label in items:
522 522 ui.write(b'%s\n' % colorname, label=label)
523 523
524 524
525 525 def _debugdisplaystyle(ui):
526 526 ui.write(_(b'available style:\n'))
527 527 if not ui._styles:
528 528 return
529 529 width = max(len(s) for s in ui._styles)
530 530 for label, effects in sorted(ui._styles.items()):
531 531 ui.write(b'%s' % label, label=label)
532 532 if effects:
533 533 # 50
534 534 ui.write(b': ')
535 535 ui.write(b' ' * (max(0, width - len(label))))
536 536 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
537 537 ui.write(b'\n')
538 538
539 539
540 540 @command(b'debugcreatestreamclonebundle', [], b'FILE')
541 541 def debugcreatestreamclonebundle(ui, repo, fname):
542 542 """create a stream clone bundle file
543 543
544 544 Stream bundles are special bundles that are essentially archives of
545 545 revlog files. They are commonly used for cloning very quickly.
546 546 """
547 547 # TODO we may want to turn this into an abort when this functionality
548 548 # is moved into `hg bundle`.
549 549 if phases.hassecret(repo):
550 550 ui.warn(
551 551 _(
552 552 b'(warning: stream clone bundle will contain secret '
553 553 b'revisions)\n'
554 554 )
555 555 )
556 556
557 557 requirements, gen = streamclone.generatebundlev1(repo)
558 558 changegroup.writechunks(ui, gen, fname)
559 559
560 560 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
561 561
562 562
563 563 @command(
564 564 b'debugdag',
565 565 [
566 566 (b't', b'tags', None, _(b'use tags as labels')),
567 567 (b'b', b'branches', None, _(b'annotate with branch names')),
568 568 (b'', b'dots', None, _(b'use dots for runs')),
569 569 (b's', b'spaces', None, _(b'separate elements by spaces')),
570 570 ],
571 571 _(b'[OPTION]... [FILE [REV]...]'),
572 572 optionalrepo=True,
573 573 )
574 574 def debugdag(ui, repo, file_=None, *revs, **opts):
575 575 """format the changelog or an index DAG as a concise textual description
576 576
577 577 If you pass a revlog index, the revlog's DAG is emitted. If you list
578 578 revision numbers, they get labeled in the output as rN.
579 579
580 580 Otherwise, the changelog DAG of the current repo is emitted.
581 581 """
582 582 spaces = opts.get('spaces')
583 583 dots = opts.get('dots')
584 584 if file_:
585 585 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
586 586 revs = set((int(r) for r in revs))
587 587
588 588 def events():
589 589 for r in rlog:
590 590 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
591 591 if r in revs:
592 592 yield b'l', (r, b"r%i" % r)
593 593
594 594 elif repo:
595 595 cl = repo.changelog
596 596 tags = opts.get('tags')
597 597 branches = opts.get('branches')
598 598 if tags:
599 599 labels = {}
600 600 for l, n in repo.tags().items():
601 601 labels.setdefault(cl.rev(n), []).append(l)
602 602
603 603 def events():
604 604 b = b"default"
605 605 for r in cl:
606 606 if branches:
607 607 newb = cl.read(cl.node(r))[5][b'branch']
608 608 if newb != b:
609 609 yield b'a', newb
610 610 b = newb
611 611 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
612 612 if tags:
613 613 ls = labels.get(r)
614 614 if ls:
615 615 for l in ls:
616 616 yield b'l', (r, l)
617 617
618 618 else:
619 619 raise error.Abort(_(b'need repo for changelog dag'))
620 620
621 621 for line in dagparser.dagtextlines(
622 622 events(),
623 623 addspaces=spaces,
624 624 wraplabels=True,
625 625 wrapannotations=True,
626 626 wrapnonlinear=dots,
627 627 usedots=dots,
628 628 maxlinewidth=70,
629 629 ):
630 630 ui.write(line)
631 631 ui.write(b"\n")
632 632
633 633
634 634 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
635 635 def debugdata(ui, repo, file_, rev=None, **opts):
636 636 """dump the contents of a data file revision"""
637 637 opts = pycompat.byteskwargs(opts)
638 638 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
639 639 if rev is not None:
640 640 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
641 641 file_, rev = None, file_
642 642 elif rev is None:
643 643 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
644 644 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
645 645 try:
646 646 ui.write(r.rawdata(r.lookup(rev)))
647 647 except KeyError:
648 648 raise error.Abort(_(b'invalid revision identifier %s') % rev)
649 649
650 650
651 651 @command(
652 652 b'debugdate',
653 653 [(b'e', b'extended', None, _(b'try extended date formats'))],
654 654 _(b'[-e] DATE [RANGE]'),
655 655 norepo=True,
656 656 optionalrepo=True,
657 657 )
658 658 def debugdate(ui, date, range=None, **opts):
659 659 """parse and display a date"""
660 660 if opts["extended"]:
661 661 d = dateutil.parsedate(date, dateutil.extendeddateformats)
662 662 else:
663 663 d = dateutil.parsedate(date)
664 664 ui.writenoi18n(b"internal: %d %d\n" % d)
665 665 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
666 666 if range:
667 667 m = dateutil.matchdate(range)
668 668 ui.writenoi18n(b"match: %s\n" % m(d[0]))
669 669
670 670
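For reference, the "internal" pair printed above is Mercurial's native date representation: a Unix timestamp plus the timezone offset in seconds west of UTC. A minimal sketch (not part of the upstream file; the helper name and date string are only examples):

def _example_parse_date(datestr=b"2020-04-07 12:00 +0200"):
    """Return the (unixtime, offset) pair and its re-rendered form."""
    when, offset = dateutil.parsedate(datestr)
    # +0200 is east of UTC, so the stored offset is -7200 seconds
    return (when, offset), dateutil.datestr((when, offset))
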
671 671 @command(
672 672 b'debugdeltachain',
673 673 cmdutil.debugrevlogopts + cmdutil.formatteropts,
674 674 _(b'-c|-m|FILE'),
675 675 optionalrepo=True,
676 676 )
677 677 def debugdeltachain(ui, repo, file_=None, **opts):
678 678 """dump information about delta chains in a revlog
679 679
680 680 Output can be templatized. Available template keywords are:
681 681
682 682 :``rev``: revision number
683 683 :``chainid``: delta chain identifier (numbered by unique base)
684 684 :``chainlen``: delta chain length to this revision
685 685 :``prevrev``: previous revision in delta chain
686 686 :``deltatype``: role of delta / how it was computed
687 687 :``compsize``: compressed size of revision
688 688 :``uncompsize``: uncompressed size of revision
689 689 :``chainsize``: total size of compressed revisions in chain
690 690 :``chainratio``: total chain size divided by uncompressed revision size
691 691 (new delta chains typically start at ratio 2.00)
692 692 :``lindist``: linear distance from base revision in delta chain to end
693 693 of this revision
694 694 :``extradist``: total size of revisions not part of this delta chain from
695 695 base of delta chain to end of this revision; a measurement
696 696 of how much extra data we need to read/seek across to read
697 697 the delta chain for this revision
698 698 :``extraratio``: extradist divided by chainsize; another representation of
699 699 how much unrelated data is needed to load this delta chain
700 700
701 701 If the repository is configured to use the sparse read, additional keywords
702 702 are available:
703 703
704 704 :``readsize``: total size of data read from the disk for a revision
705 705 (sum of the sizes of all the blocks)
706 706 :``largestblock``: size of the largest block of data read from the disk
707 707 :``readdensity``: density of useful bytes in the data read from the disk
708 708 :``srchunks``: in how many data hunks the whole revision would be read
709 709
710 710 The sparse read can be enabled with experimental.sparse-read = True
711 711 """
712 712 opts = pycompat.byteskwargs(opts)
713 713 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
714 714 index = r.index
715 715 start = r.start
716 716 length = r.length
717 717 generaldelta = r.version & revlog.FLAG_GENERALDELTA
718 718 withsparseread = getattr(r, '_withsparseread', False)
719 719
720 720 def revinfo(rev):
721 721 e = index[rev]
722 722 compsize = e[1]
723 723 uncompsize = e[2]
724 724 chainsize = 0
725 725
726 726 if generaldelta:
727 727 if e[3] == e[5]:
728 728 deltatype = b'p1'
729 729 elif e[3] == e[6]:
730 730 deltatype = b'p2'
731 731 elif e[3] == rev - 1:
732 732 deltatype = b'prev'
733 733 elif e[3] == rev:
734 734 deltatype = b'base'
735 735 else:
736 736 deltatype = b'other'
737 737 else:
738 738 if e[3] == rev:
739 739 deltatype = b'base'
740 740 else:
741 741 deltatype = b'prev'
742 742
743 743 chain = r._deltachain(rev)[0]
744 744 for iterrev in chain:
745 745 e = index[iterrev]
746 746 chainsize += e[1]
747 747
748 748 return compsize, uncompsize, deltatype, chain, chainsize
749 749
750 750 fm = ui.formatter(b'debugdeltachain', opts)
751 751
752 752 fm.plain(
753 753 b' rev chain# chainlen prev delta '
754 754 b'size rawsize chainsize ratio lindist extradist '
755 755 b'extraratio'
756 756 )
757 757 if withsparseread:
758 758 fm.plain(b' readsize largestblk rddensity srchunks')
759 759 fm.plain(b'\n')
760 760
761 761 chainbases = {}
762 762 for rev in r:
763 763 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
764 764 chainbase = chain[0]
765 765 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
766 766 basestart = start(chainbase)
767 767 revstart = start(rev)
768 768 lineardist = revstart + comp - basestart
769 769 extradist = lineardist - chainsize
770 770 try:
771 771 prevrev = chain[-2]
772 772 except IndexError:
773 773 prevrev = -1
774 774
775 775 if uncomp != 0:
776 776 chainratio = float(chainsize) / float(uncomp)
777 777 else:
778 778 chainratio = chainsize
779 779
780 780 if chainsize != 0:
781 781 extraratio = float(extradist) / float(chainsize)
782 782 else:
783 783 extraratio = extradist
784 784
785 785 fm.startitem()
786 786 fm.write(
787 787 b'rev chainid chainlen prevrev deltatype compsize '
788 788 b'uncompsize chainsize chainratio lindist extradist '
789 789 b'extraratio',
790 790 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
791 791 rev,
792 792 chainid,
793 793 len(chain),
794 794 prevrev,
795 795 deltatype,
796 796 comp,
797 797 uncomp,
798 798 chainsize,
799 799 chainratio,
800 800 lineardist,
801 801 extradist,
802 802 extraratio,
803 803 rev=rev,
804 804 chainid=chainid,
805 805 chainlen=len(chain),
806 806 prevrev=prevrev,
807 807 deltatype=deltatype,
808 808 compsize=comp,
809 809 uncompsize=uncomp,
810 810 chainsize=chainsize,
811 811 chainratio=chainratio,
812 812 lindist=lineardist,
813 813 extradist=extradist,
814 814 extraratio=extraratio,
815 815 )
816 816 if withsparseread:
817 817 readsize = 0
818 818 largestblock = 0
819 819 srchunks = 0
820 820
821 821 for revschunk in deltautil.slicechunk(r, chain):
822 822 srchunks += 1
823 823 blkend = start(revschunk[-1]) + length(revschunk[-1])
824 824 blksize = blkend - start(revschunk[0])
825 825
826 826 readsize += blksize
827 827 if largestblock < blksize:
828 828 largestblock = blksize
829 829
830 830 if readsize:
831 831 readdensity = float(chainsize) / float(readsize)
832 832 else:
833 833 readdensity = 1
834 834
835 835 fm.write(
836 836 b'readsize largestblock readdensity srchunks',
837 837 b' %10d %10d %9.5f %8d',
838 838 readsize,
839 839 largestblock,
840 840 readdensity,
841 841 srchunks,
842 842 readsize=readsize,
843 843 largestblock=largestblock,
844 844 readdensity=readdensity,
845 845 srchunks=srchunks,
846 846 )
847 847
848 848 fm.plain(b'\n')
849 849
850 850 fm.end()
851 851
852 852
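The size-related keywords documented in the docstring are all derived from a handful of byte counts. A minimal sketch of that arithmetic (not part of the upstream file; the helper name and numbers are made up):

def _example_chain_metrics(revstart=2048, comp=120, basestart=1024,
                           chainsize=600, uncomp=1500):
    """Recompute chainratio/lindist/extradist/extraratio from raw sizes."""
    lineardist = revstart + comp - basestart  # bytes spanned on disk
    extradist = lineardist - chainsize        # bytes belonging to other chains
    chainratio = float(chainsize) / float(uncomp) if uncomp else chainsize
    extraratio = float(extradist) / float(chainsize) if chainsize else extradist
    return chainratio, lineardist, extradist, extraratio
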
853 853 @command(
854 854 b'debugdirstate|debugstate',
855 855 [
856 856 (
857 857 b'',
858 858 b'nodates',
859 859 None,
860 860 _(b'do not display the saved mtime (DEPRECATED)'),
861 861 ),
862 862 (b'', b'dates', True, _(b'display the saved mtime')),
863 863 (b'', b'datesort', None, _(b'sort by saved mtime')),
864 864 ],
865 865 _(b'[OPTION]...'),
866 866 )
867 867 def debugstate(ui, repo, **opts):
868 868 """show the contents of the current dirstate"""
869 869
870 870 nodates = not opts['dates']
871 871 if opts.get('nodates') is not None:
872 872 nodates = True
873 873 datesort = opts.get('datesort')
874 874
875 875 if datesort:
876 876 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
877 877 else:
878 878 keyfunc = None # sort by filename
879 879 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
880 880 if ent[3] == -1:
881 881 timestr = b'unset '
882 882 elif nodates:
883 883 timestr = b'set '
884 884 else:
885 885 timestr = time.strftime(
886 886 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
887 887 )
888 888 timestr = encoding.strtolocal(timestr)
889 889 if ent[1] & 0o20000:
890 890 mode = b'lnk'
891 891 else:
892 892 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
893 893 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
894 894 for f in repo.dirstate.copies():
895 895 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
896 896
897 897
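Each dirstate entry iterated above is a (state, mode, size, mtime) tuple. A minimal sketch of how the mode and mtime columns are rendered (not part of the upstream file; the helper name and values are made up):

def _example_render_dirstate_fields(mode=0o100644, mtime=-1):
    """Render the mode/mtime columns the way debugstate() does."""
    kind = b'lnk' if mode & 0o20000 else b'%3o' % (mode & 0o777 & ~util.umask)
    when = b'unset ' if mtime == -1 else b'set '
    return kind, when
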
898 898 @command(
899 899 b'debugdiscovery',
900 900 [
901 901 (b'', b'old', None, _(b'use old-style discovery')),
902 902 (
903 903 b'',
904 904 b'nonheads',
905 905 None,
906 906 _(b'use old-style discovery with non-heads included'),
907 907 ),
908 908 (b'', b'rev', [], b'restrict discovery to this set of revs'),
909 909 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
910 910 ]
911 911 + cmdutil.remoteopts,
912 912 _(b'[--rev REV] [OTHER]'),
913 913 )
914 914 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
915 915 """runs the changeset discovery protocol in isolation"""
916 916 opts = pycompat.byteskwargs(opts)
917 917 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
918 918 remote = hg.peer(repo, opts, remoteurl)
919 919 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
920 920
921 921 # make sure tests are repeatable
922 922 random.seed(int(opts[b'seed']))
923 923
924 924 if opts.get(b'old'):
925 925
926 926 def doit(pushedrevs, remoteheads, remote=remote):
927 927 if not util.safehasattr(remote, b'branches'):
928 928 # enable in-client legacy support
929 929 remote = localrepo.locallegacypeer(remote.local())
930 930 common, _in, hds = treediscovery.findcommonincoming(
931 931 repo, remote, force=True
932 932 )
933 933 common = set(common)
934 934 if not opts.get(b'nonheads'):
935 935 ui.writenoi18n(
936 936 b"unpruned common: %s\n"
937 937 % b" ".join(sorted(short(n) for n in common))
938 938 )
939 939
940 940 clnode = repo.changelog.node
941 941 common = repo.revs(b'heads(::%ln)', common)
942 942 common = {clnode(r) for r in common}
943 943 return common, hds
944 944
945 945 else:
946 946
947 947 def doit(pushedrevs, remoteheads, remote=remote):
948 948 nodes = None
949 949 if pushedrevs:
950 950 revs = scmutil.revrange(repo, pushedrevs)
951 951 nodes = [repo[r].node() for r in revs]
952 952 common, any, hds = setdiscovery.findcommonheads(
953 953 ui, repo, remote, ancestorsof=nodes
954 954 )
955 955 return common, hds
956 956
957 957 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
958 958 localrevs = opts[b'rev']
959 959 with util.timedcm('debug-discovery') as t:
960 960 common, hds = doit(localrevs, remoterevs)
961 961
962 962 # compute all statistics
963 963 common = set(common)
964 964 rheads = set(hds)
965 965 lheads = set(repo.heads())
966 966
967 967 data = {}
968 968 data[b'elapsed'] = t.elapsed
969 969 data[b'nb-common'] = len(common)
970 970 data[b'nb-common-local'] = len(common & lheads)
971 971 data[b'nb-common-remote'] = len(common & rheads)
972 972 data[b'nb-common-both'] = len(common & rheads & lheads)
973 973 data[b'nb-local'] = len(lheads)
974 974 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
975 975 data[b'nb-remote'] = len(rheads)
976 976 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
977 977 data[b'nb-revs'] = len(repo.revs(b'all()'))
978 978 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
979 979 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
980 980
981 981 # display discovery summary
982 982 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
983 983 ui.writenoi18n(b"heads summary:\n")
984 984 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
985 985 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
986 986 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
987 987 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
988 988 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
989 989 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
990 990 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
991 991 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
992 992 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
993 993 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
994 994 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
995 995 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
996 996 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
997 997
998 998 if ui.verbose:
999 999 ui.writenoi18n(
1000 1000 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
1001 1001 )
1002 1002
1003 1003
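The heads summary printed above boils down to intersections of three node sets. A minimal sketch of that bookkeeping (not part of the upstream file; the helper name is made up):

def _example_heads_summary(common, localheads, remoteheads):
    """Recompute the head counts from sets of common/local/remote head nodes."""
    return {
        b'nb-common-local': len(common & localheads),
        b'nb-common-remote': len(common & remoteheads),
        b'nb-common-both': len(common & localheads & remoteheads),
        b'nb-local-missing': len(localheads - common),
        b'nb-remote-unknown': len(remoteheads - common),
    }
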
1004 1004 _chunksize = 4 << 10
1005 1005
1006 1006
1007 1007 @command(
1008 1008 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1009 1009 )
1010 1010 def debugdownload(ui, repo, url, output=None, **opts):
1011 1011 """download a resource using Mercurial logic and config
1012 1012 """
1013 1013 fh = urlmod.open(ui, url, output)
1014 1014
1015 1015 dest = ui
1016 1016 if output:
1017 1017 dest = open(output, b"wb", _chunksize)
1018 1018 try:
1019 1019 data = fh.read(_chunksize)
1020 1020 while data:
1021 1021 dest.write(data)
1022 1022 data = fh.read(_chunksize)
1023 1023 finally:
1024 1024 if output:
1025 1025 dest.close()
1026 1026
1027 1027
1028 1028 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1029 1029 def debugextensions(ui, repo, **opts):
1030 1030 '''show information about active extensions'''
1031 1031 opts = pycompat.byteskwargs(opts)
1032 1032 exts = extensions.extensions(ui)
1033 1033 hgver = util.version()
1034 1034 fm = ui.formatter(b'debugextensions', opts)
1035 1035 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1036 1036 isinternal = extensions.ismoduleinternal(extmod)
1037 1037 extsource = None
1038 1038
1039 1039 if util.safehasattr(extmod, '__file__'):
1040 1040 extsource = pycompat.fsencode(extmod.__file__)
1041 1041 elif getattr(sys, 'oxidized', False):
1042 1042 extsource = pycompat.sysexecutable
1043 1043 if isinternal:
1044 1044 exttestedwith = [] # never expose magic string to users
1045 1045 else:
1046 1046 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1047 1047 extbuglink = getattr(extmod, 'buglink', None)
1048 1048
1049 1049 fm.startitem()
1050 1050
1051 1051 if ui.quiet or ui.verbose:
1052 1052 fm.write(b'name', b'%s\n', extname)
1053 1053 else:
1054 1054 fm.write(b'name', b'%s', extname)
1055 1055 if isinternal or hgver in exttestedwith:
1056 1056 fm.plain(b'\n')
1057 1057 elif not exttestedwith:
1058 1058 fm.plain(_(b' (untested!)\n'))
1059 1059 else:
1060 1060 lasttestedversion = exttestedwith[-1]
1061 1061 fm.plain(b' (%s!)\n' % lasttestedversion)
1062 1062
1063 1063 fm.condwrite(
1064 1064 ui.verbose and extsource,
1065 1065 b'source',
1066 1066 _(b' location: %s\n'),
1067 1067 extsource or b"",
1068 1068 )
1069 1069
1070 1070 if ui.verbose:
1071 1071 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1072 1072 fm.data(bundled=isinternal)
1073 1073
1074 1074 fm.condwrite(
1075 1075 ui.verbose and exttestedwith,
1076 1076 b'testedwith',
1077 1077 _(b' tested with: %s\n'),
1078 1078 fm.formatlist(exttestedwith, name=b'ver'),
1079 1079 )
1080 1080
1081 1081 fm.condwrite(
1082 1082 ui.verbose and extbuglink,
1083 1083 b'buglink',
1084 1084 _(b' bug reporting: %s\n'),
1085 1085 extbuglink or b"",
1086 1086 )
1087 1087
1088 1088 fm.end()
1089 1089
1090 1090
1091 1091 @command(
1092 1092 b'debugfileset',
1093 1093 [
1094 1094 (
1095 1095 b'r',
1096 1096 b'rev',
1097 1097 b'',
1098 1098 _(b'apply the filespec on this revision'),
1099 1099 _(b'REV'),
1100 1100 ),
1101 1101 (
1102 1102 b'',
1103 1103 b'all-files',
1104 1104 False,
1105 1105 _(b'test files from all revisions and working directory'),
1106 1106 ),
1107 1107 (
1108 1108 b's',
1109 1109 b'show-matcher',
1110 1110 None,
1111 1111 _(b'print internal representation of matcher'),
1112 1112 ),
1113 1113 (
1114 1114 b'p',
1115 1115 b'show-stage',
1116 1116 [],
1117 1117 _(b'print parsed tree at the given stage'),
1118 1118 _(b'NAME'),
1119 1119 ),
1120 1120 ],
1121 1121 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1122 1122 )
1123 1123 def debugfileset(ui, repo, expr, **opts):
1124 1124 '''parse and apply a fileset specification'''
1125 1125 from . import fileset
1126 1126
1127 1127 fileset.symbols # force import of fileset so we have predicates to optimize
1128 1128 opts = pycompat.byteskwargs(opts)
1129 1129 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1130 1130
1131 1131 stages = [
1132 1132 (b'parsed', pycompat.identity),
1133 1133 (b'analyzed', filesetlang.analyze),
1134 1134 (b'optimized', filesetlang.optimize),
1135 1135 ]
1136 1136 stagenames = set(n for n, f in stages)
1137 1137
1138 1138 showalways = set()
1139 1139 if ui.verbose and not opts[b'show_stage']:
1140 1140 # show parsed tree by --verbose (deprecated)
1141 1141 showalways.add(b'parsed')
1142 1142 if opts[b'show_stage'] == [b'all']:
1143 1143 showalways.update(stagenames)
1144 1144 else:
1145 1145 for n in opts[b'show_stage']:
1146 1146 if n not in stagenames:
1147 1147 raise error.Abort(_(b'invalid stage name: %s') % n)
1148 1148 showalways.update(opts[b'show_stage'])
1149 1149
1150 1150 tree = filesetlang.parse(expr)
1151 1151 for n, f in stages:
1152 1152 tree = f(tree)
1153 1153 if n in showalways:
1154 1154 if opts[b'show_stage'] or n != b'parsed':
1155 1155 ui.write(b"* %s:\n" % n)
1156 1156 ui.write(filesetlang.prettyformat(tree), b"\n")
1157 1157
1158 1158 files = set()
1159 1159 if opts[b'all_files']:
1160 1160 for r in repo:
1161 1161 c = repo[r]
1162 1162 files.update(c.files())
1163 1163 files.update(c.substate)
1164 1164 if opts[b'all_files'] or ctx.rev() is None:
1165 1165 wctx = repo[None]
1166 1166 files.update(
1167 1167 repo.dirstate.walk(
1168 1168 scmutil.matchall(repo),
1169 1169 subrepos=list(wctx.substate),
1170 1170 unknown=True,
1171 1171 ignored=True,
1172 1172 )
1173 1173 )
1174 1174 files.update(wctx.substate)
1175 1175 else:
1176 1176 files.update(ctx.files())
1177 1177 files.update(ctx.substate)
1178 1178
1179 1179 m = ctx.matchfileset(repo.getcwd(), expr)
1180 1180 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1181 1181 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1182 1182 for f in sorted(files):
1183 1183 if not m(f):
1184 1184 continue
1185 1185 ui.write(b"%s\n" % f)
1186 1186
1187 1187
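The three --show-stage names map directly onto the filesetlang pipeline used above. A minimal sketch (not part of the upstream file; the helper name and expression are only examples):

def _example_fileset_stages(expr=b"added() and clean()"):
    """Run an expression through the parsed/analyzed/optimized stages."""
    tree = filesetlang.parse(expr)       # 'parsed'
    tree = filesetlang.analyze(tree)     # 'analyzed'
    return filesetlang.optimize(tree)    # 'optimized'
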
1188 1188 @command(b'debugformat', [] + cmdutil.formatteropts)
1189 1189 def debugformat(ui, repo, **opts):
1190 1190 """display format information about the current repository
1191 1191
1192 1192 Use --verbose to get extra information about current config value and
1193 1193 Mercurial default."""
1194 1194 opts = pycompat.byteskwargs(opts)
1195 1195 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1196 1196 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1197 1197
1198 1198 def makeformatname(name):
1199 1199 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1200 1200
1201 1201 fm = ui.formatter(b'debugformat', opts)
1202 1202 if fm.isplain():
1203 1203
1204 1204 def formatvalue(value):
1205 1205 if util.safehasattr(value, b'startswith'):
1206 1206 return value
1207 1207 if value:
1208 1208 return b'yes'
1209 1209 else:
1210 1210 return b'no'
1211 1211
1212 1212 else:
1213 1213 formatvalue = pycompat.identity
1214 1214
1215 1215 fm.plain(b'format-variant')
1216 1216 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1217 1217 fm.plain(b' repo')
1218 1218 if ui.verbose:
1219 1219 fm.plain(b' config default')
1220 1220 fm.plain(b'\n')
1221 1221 for fv in upgrade.allformatvariant:
1222 1222 fm.startitem()
1223 1223 repovalue = fv.fromrepo(repo)
1224 1224 configvalue = fv.fromconfig(repo)
1225 1225
1226 1226 if repovalue != configvalue:
1227 1227 namelabel = b'formatvariant.name.mismatchconfig'
1228 1228 repolabel = b'formatvariant.repo.mismatchconfig'
1229 1229 elif repovalue != fv.default:
1230 1230 namelabel = b'formatvariant.name.mismatchdefault'
1231 1231 repolabel = b'formatvariant.repo.mismatchdefault'
1232 1232 else:
1233 1233 namelabel = b'formatvariant.name.uptodate'
1234 1234 repolabel = b'formatvariant.repo.uptodate'
1235 1235
1236 1236 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1237 1237 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1238 1238 if fv.default != configvalue:
1239 1239 configlabel = b'formatvariant.config.special'
1240 1240 else:
1241 1241 configlabel = b'formatvariant.config.default'
1242 1242 fm.condwrite(
1243 1243 ui.verbose,
1244 1244 b'config',
1245 1245 b' %6s',
1246 1246 formatvalue(configvalue),
1247 1247 label=configlabel,
1248 1248 )
1249 1249 fm.condwrite(
1250 1250 ui.verbose,
1251 1251 b'default',
1252 1252 b' %7s',
1253 1253 formatvalue(fv.default),
1254 1254 label=b'formatvariant.default',
1255 1255 )
1256 1256 fm.plain(b'\n')
1257 1257 fm.end()
1258 1258
1259 1259
1260 1260 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1261 1261 def debugfsinfo(ui, path=b"."):
1262 1262 """show information detected about current filesystem"""
1263 1263 ui.writenoi18n(b'path: %s\n' % path)
1264 1264 ui.writenoi18n(
1265 1265 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1266 1266 )
1267 1267 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1268 1268 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1269 1269 ui.writenoi18n(
1270 1270 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1271 1271 )
1272 1272 ui.writenoi18n(
1273 1273 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1274 1274 )
1275 1275 casesensitive = b'(unknown)'
1276 1276 try:
1277 1277 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1278 1278 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1279 1279 except OSError:
1280 1280 pass
1281 1281 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1282 1282
1283 1283
1284 1284 @command(
1285 1285 b'debuggetbundle',
1286 1286 [
1287 1287 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1288 1288 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1289 1289 (
1290 1290 b't',
1291 1291 b'type',
1292 1292 b'bzip2',
1293 1293 _(b'bundle compression type to use'),
1294 1294 _(b'TYPE'),
1295 1295 ),
1296 1296 ],
1297 1297 _(b'REPO FILE [-H|-C ID]...'),
1298 1298 norepo=True,
1299 1299 )
1300 1300 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1301 1301 """retrieves a bundle from a repo
1302 1302
1303 1303 Every ID must be a full-length hex node id string. Saves the bundle to the
1304 1304 given file.
1305 1305 """
1306 1306 opts = pycompat.byteskwargs(opts)
1307 1307 repo = hg.peer(ui, opts, repopath)
1308 1308 if not repo.capable(b'getbundle'):
1309 1309 raise error.Abort(b"getbundle() not supported by target repository")
1310 1310 args = {}
1311 1311 if common:
1312 1312 args['common'] = [bin(s) for s in common]
1313 1313 if head:
1314 1314 args['heads'] = [bin(s) for s in head]
1315 1315 # TODO: get desired bundlecaps from command line.
1316 1316 args['bundlecaps'] = None
1317 1317 bundle = repo.getbundle(b'debug', **args)
1318 1318
1319 1319 bundletype = opts.get(b'type', b'bzip2').lower()
1320 1320 btypes = {
1321 1321 b'none': b'HG10UN',
1322 1322 b'bzip2': b'HG10BZ',
1323 1323 b'gzip': b'HG10GZ',
1324 1324 b'bundle2': b'HG20',
1325 1325 }
1326 1326 bundletype = btypes.get(bundletype)
1327 1327 if bundletype not in bundle2.bundletypes:
1328 1328 raise error.Abort(_(b'unknown bundle type specified with --type'))
1329 1329 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1330 1330
1331 1331
1332 1332 @command(b'debugignore', [], b'[FILE]')
1333 1333 def debugignore(ui, repo, *files, **opts):
1334 1334 """display the combined ignore pattern and information about ignored files
1335 1335
1336 1336 With no argument display the combined ignore pattern.
1337 1337
1338 1338 Given space separated file names, shows if the given file is ignored and
1339 1339 if so, show the ignore rule (file and line number) that matched it.
1340 1340 """
1341 1341 ignore = repo.dirstate._ignore
1342 1342 if not files:
1343 1343 # Show all the patterns
1344 1344 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1345 1345 else:
1346 1346 m = scmutil.match(repo[None], pats=files)
1347 1347 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1348 1348 for f in m.files():
1349 1349 nf = util.normpath(f)
1350 1350 ignored = None
1351 1351 ignoredata = None
1352 1352 if nf != b'.':
1353 1353 if ignore(nf):
1354 1354 ignored = nf
1355 1355 ignoredata = repo.dirstate._ignorefileandline(nf)
1356 1356 else:
1357 1357 for p in pathutil.finddirs(nf):
1358 1358 if ignore(p):
1359 1359 ignored = p
1360 1360 ignoredata = repo.dirstate._ignorefileandline(p)
1361 1361 break
1362 1362 if ignored:
1363 1363 if ignored == nf:
1364 1364 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1365 1365 else:
1366 1366 ui.write(
1367 1367 _(
1368 1368 b"%s is ignored because of "
1369 1369 b"containing directory %s\n"
1370 1370 )
1371 1371 % (uipathfn(f), ignored)
1372 1372 )
1373 1373 ignorefile, lineno, line = ignoredata
1374 1374 ui.write(
1375 1375 _(b"(ignore rule in %s, line %d: '%s')\n")
1376 1376 % (ignorefile, lineno, line)
1377 1377 )
1378 1378 else:
1379 1379 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1380 1380
1381 1381
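The containing-directory fallback used above is worth spelling out: if a file is not matched directly, each of its parent directories is tried in turn. A minimal sketch (not part of the upstream file; the helper name and path are made up):

def _example_ignore_lookup(ignore, path=b"build/output/log.txt"):
    """Return the path or containing directory that an ignore matcher hits."""
    if ignore(path):
        return path
    for parent in pathutil.finddirs(path):
        if ignore(parent):
            return parent
    return None
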
1382 1382 @command(
1383 1383 b'debugindex',
1384 1384 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1385 1385 _(b'-c|-m|FILE'),
1386 1386 )
1387 1387 def debugindex(ui, repo, file_=None, **opts):
1388 1388 """dump index data for a storage primitive"""
1389 1389 opts = pycompat.byteskwargs(opts)
1390 1390 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1391 1391
1392 1392 if ui.debugflag:
1393 1393 shortfn = hex
1394 1394 else:
1395 1395 shortfn = short
1396 1396
1397 1397 idlen = 12
1398 1398 for i in store:
1399 1399 idlen = len(shortfn(store.node(i)))
1400 1400 break
1401 1401
1402 1402 fm = ui.formatter(b'debugindex', opts)
1403 1403 fm.plain(
1404 1404 b' rev linkrev %s %s p2\n'
1405 1405 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1406 1406 )
1407 1407
1408 1408 for rev in store:
1409 1409 node = store.node(rev)
1410 1410 parents = store.parents(node)
1411 1411
1412 1412 fm.startitem()
1413 1413 fm.write(b'rev', b'%6d ', rev)
1414 1414 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1415 1415 fm.write(b'node', b'%s ', shortfn(node))
1416 1416 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1417 1417 fm.write(b'p2', b'%s', shortfn(parents[1]))
1418 1418 fm.plain(b'\n')
1419 1419
1420 1420 fm.end()
1421 1421
1422 1422
1423 1423 @command(
1424 1424 b'debugindexdot',
1425 1425 cmdutil.debugrevlogopts,
1426 1426 _(b'-c|-m|FILE'),
1427 1427 optionalrepo=True,
1428 1428 )
1429 1429 def debugindexdot(ui, repo, file_=None, **opts):
1430 1430 """dump an index DAG as a graphviz dot file"""
1431 1431 opts = pycompat.byteskwargs(opts)
1432 1432 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1433 1433 ui.writenoi18n(b"digraph G {\n")
1434 1434 for i in r:
1435 1435 node = r.node(i)
1436 1436 pp = r.parents(node)
1437 1437 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1438 1438 if pp[1] != nullid:
1439 1439 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1440 1440 ui.write(b"}\n")
1441 1441
1442 1442
1443 1443 @command(b'debugindexstats', [])
1444 1444 def debugindexstats(ui, repo):
1445 1445 """show stats related to the changelog index"""
1446 1446 repo.changelog.shortest(nullid, 1)
1447 1447 index = repo.changelog.index
1448 1448 if not util.safehasattr(index, b'stats'):
1449 1449 raise error.Abort(_(b'debugindexstats only works with native code'))
1450 1450 for k, v in sorted(index.stats().items()):
1451 1451 ui.write(b'%s: %d\n' % (k, v))
1452 1452
1453 1453
1454 1454 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1455 1455 def debuginstall(ui, **opts):
1456 1456 '''test Mercurial installation
1457 1457
1458 1458 Returns 0 on success.
1459 1459 '''
1460 1460 opts = pycompat.byteskwargs(opts)
1461 1461
1462 1462 problems = 0
1463 1463
1464 1464 fm = ui.formatter(b'debuginstall', opts)
1465 1465 fm.startitem()
1466 1466
1467 1467 # encoding
1468 1468 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1469 1469 err = None
1470 1470 try:
1471 1471 codecs.lookup(pycompat.sysstr(encoding.encoding))
1472 1472 except LookupError as inst:
1473 1473 err = stringutil.forcebytestr(inst)
1474 1474 problems += 1
1475 1475 fm.condwrite(
1476 1476 err,
1477 1477 b'encodingerror',
1478 1478 _(b" %s\n (check that your locale is properly set)\n"),
1479 1479 err,
1480 1480 )
1481 1481
1482 1482 # Python
1483 1483 pythonlib = None
1484 1484 if util.safehasattr(os, '__file__'):
1485 1485 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1486 1486 elif getattr(sys, 'oxidized', False):
1487 1487 pythonlib = pycompat.sysexecutable
1488 1488
1489 1489 fm.write(
1490 1490 b'pythonexe',
1491 1491 _(b"checking Python executable (%s)\n"),
1492 1492 pycompat.sysexecutable or _(b"unknown"),
1493 1493 )
1494 1494 fm.write(
1495 1495 b'pythonimplementation',
1496 1496 _(b"checking Python implementation (%s)\n"),
1497 1497 pycompat.sysbytes(platform.python_implementation()),
1498 1498 )
1499 1499 fm.write(
1500 1500 b'pythonver',
1501 1501 _(b"checking Python version (%s)\n"),
1502 1502 (b"%d.%d.%d" % sys.version_info[:3]),
1503 1503 )
1504 1504 fm.write(
1505 1505 b'pythonlib',
1506 1506 _(b"checking Python lib (%s)...\n"),
1507 1507 pythonlib or _(b"unknown"),
1508 1508 )
1509 1509
1510 1510 security = set(sslutil.supportedprotocols)
1511 1511 if sslutil.hassni:
1512 1512 security.add(b'sni')
1513 1513
1514 1514 fm.write(
1515 1515 b'pythonsecurity',
1516 1516 _(b"checking Python security support (%s)\n"),
1517 1517 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1518 1518 )
1519 1519
1520 1520 # These are warnings, not errors. So don't increment problem count. This
1521 1521 # may change in the future.
1522 1522 if b'tls1.2' not in security:
1523 1523 fm.plain(
1524 1524 _(
1525 1525 b' TLS 1.2 not supported by Python install; '
1526 1526 b'network connections lack modern security\n'
1527 1527 )
1528 1528 )
1529 1529 if b'sni' not in security:
1530 1530 fm.plain(
1531 1531 _(
1532 1532 b' SNI not supported by Python install; may have '
1533 1533 b'connectivity issues with some servers\n'
1534 1534 )
1535 1535 )
1536 1536
1537 1537 # TODO print CA cert info
1538 1538
1539 1539 # hg version
1540 1540 hgver = util.version()
1541 1541 fm.write(
1542 1542 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1543 1543 )
1544 1544 fm.write(
1545 1545 b'hgverextra',
1546 1546 _(b"checking Mercurial custom build (%s)\n"),
1547 1547 b'+'.join(hgver.split(b'+')[1:]),
1548 1548 )
1549 1549
1550 1550 # compiled modules
1551 1551 hgmodules = None
1552 1552 if util.safehasattr(sys.modules[__name__], '__file__'):
1553 1553 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1554 1554 elif getattr(sys, 'oxidized', False):
1555 1555 hgmodules = pycompat.sysexecutable
1556 1556
1557 1557 fm.write(
1558 1558 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1559 1559 )
1560 1560 fm.write(
1561 1561 b'hgmodules',
1562 1562 _(b"checking installed modules (%s)...\n"),
1563 1563 hgmodules or _(b"unknown"),
1564 1564 )
1565 1565
1566 1566 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1567 1567 rustext = rustandc # for now, that's the only case
1568 1568 cext = policy.policy in (b'c', b'allow') or rustandc
1569 1569 nopure = cext or rustext
1570 1570 if nopure:
1571 1571 err = None
1572 1572 try:
1573 1573 if cext:
1574 1574 from .cext import ( # pytype: disable=import-error
1575 1575 base85,
1576 1576 bdiff,
1577 1577 mpatch,
1578 1578 osutil,
1579 1579 )
1580 1580
1581 1581 # quiet pyflakes
1582 1582 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1583 1583 if rustext:
1584 1584 from .rustext import ( # pytype: disable=import-error
1585 1585 ancestor,
1586 1586 dirstate,
1587 1587 )
1588 1588
1589 1589 dir(ancestor), dir(dirstate) # quiet pyflakes
1590 1590 except Exception as inst:
1591 1591 err = stringutil.forcebytestr(inst)
1592 1592 problems += 1
1593 1593 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1594 1594
1595 1595 compengines = util.compengines._engines.values()
1596 1596 fm.write(
1597 1597 b'compengines',
1598 1598 _(b'checking registered compression engines (%s)\n'),
1599 1599 fm.formatlist(
1600 1600 sorted(e.name() for e in compengines),
1601 1601 name=b'compengine',
1602 1602 fmt=b'%s',
1603 1603 sep=b', ',
1604 1604 ),
1605 1605 )
1606 1606 fm.write(
1607 1607 b'compenginesavail',
1608 1608 _(b'checking available compression engines (%s)\n'),
1609 1609 fm.formatlist(
1610 1610 sorted(e.name() for e in compengines if e.available()),
1611 1611 name=b'compengine',
1612 1612 fmt=b'%s',
1613 1613 sep=b', ',
1614 1614 ),
1615 1615 )
1616 1616 wirecompengines = compression.compengines.supportedwireengines(
1617 1617 compression.SERVERROLE
1618 1618 )
1619 1619 fm.write(
1620 1620 b'compenginesserver',
1621 1621 _(
1622 1622 b'checking available compression engines '
1623 1623 b'for wire protocol (%s)\n'
1624 1624 ),
1625 1625 fm.formatlist(
1626 1626 [e.name() for e in wirecompengines if e.wireprotosupport()],
1627 1627 name=b'compengine',
1628 1628 fmt=b'%s',
1629 1629 sep=b', ',
1630 1630 ),
1631 1631 )
1632 1632 re2 = b'missing'
1633 1633 if util._re2:
1634 1634 re2 = b'available'
1635 1635 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1636 1636 fm.data(re2=bool(util._re2))
1637 1637
1638 1638 # templates
1639 1639 p = templater.templatepaths()
1640 1640 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1641 1641 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1642 1642 if p:
1643 1643 m = templater.templatepath(b"map-cmdline.default")
1644 1644 if m:
1645 1645 # template found, check if it is working
1646 1646 err = None
1647 1647 try:
1648 1648 templater.templater.frommapfile(m)
1649 1649 except Exception as inst:
1650 1650 err = stringutil.forcebytestr(inst)
1651 1651 p = None
1652 1652 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1653 1653 else:
1654 1654 p = None
1655 1655 fm.condwrite(
1656 1656 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1657 1657 )
1658 1658 fm.condwrite(
1659 1659 not m,
1660 1660 b'defaulttemplatenotfound',
1661 1661 _(b" template '%s' not found\n"),
1662 1662 b"default",
1663 1663 )
1664 1664 if not p:
1665 1665 problems += 1
1666 1666 fm.condwrite(
1667 1667 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1668 1668 )
1669 1669
1670 1670 # editor
1671 1671 editor = ui.geteditor()
1672 1672 editor = util.expandpath(editor)
1673 1673 editorbin = procutil.shellsplit(editor)[0]
1674 1674 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1675 1675 cmdpath = procutil.findexe(editorbin)
1676 1676 fm.condwrite(
1677 1677 not cmdpath and editor == b'vi',
1678 1678 b'vinotfound',
1679 1679 _(
1680 1680 b" No commit editor set and can't find %s in PATH\n"
1681 1681 b" (specify a commit editor in your configuration"
1682 1682 b" file)\n"
1683 1683 ),
1684 1684 not cmdpath and editor == b'vi' and editorbin,
1685 1685 )
1686 1686 fm.condwrite(
1687 1687 not cmdpath and editor != b'vi',
1688 1688 b'editornotfound',
1689 1689 _(
1690 1690 b" Can't find editor '%s' in PATH\n"
1691 1691 b" (specify a commit editor in your configuration"
1692 1692 b" file)\n"
1693 1693 ),
1694 1694 not cmdpath and editorbin,
1695 1695 )
1696 1696 if not cmdpath and editor != b'vi':
1697 1697 problems += 1
1698 1698
1699 1699 # check username
1700 1700 username = None
1701 1701 err = None
1702 1702 try:
1703 1703 username = ui.username()
1704 1704 except error.Abort as e:
1705 1705 err = stringutil.forcebytestr(e)
1706 1706 problems += 1
1707 1707
1708 1708 fm.condwrite(
1709 1709 username, b'username', _(b"checking username (%s)\n"), username
1710 1710 )
1711 1711 fm.condwrite(
1712 1712 err,
1713 1713 b'usernameerror',
1714 1714 _(
1715 1715 b"checking username...\n %s\n"
1716 1716 b" (specify a username in your configuration file)\n"
1717 1717 ),
1718 1718 err,
1719 1719 )
1720 1720
1721 1721 for name, mod in extensions.extensions():
1722 1722 handler = getattr(mod, 'debuginstall', None)
1723 1723 if handler is not None:
1724 1724 problems += handler(ui, fm)
1725 1725
1726 1726 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1727 1727 if not problems:
1728 1728 fm.data(problems=problems)
1729 1729 fm.condwrite(
1730 1730 problems,
1731 1731 b'problems',
1732 1732 _(b"%d problems detected, please check your install!\n"),
1733 1733 problems,
1734 1734 )
1735 1735 fm.end()
1736 1736
1737 1737 return problems
1738 1738
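# Illustrative note (not part of the original module): third-party extensions
# can take part in the checks above by exposing a module-level
# ``debuginstall(ui, fm)`` callable returning the number of problems found;
# the getattr() loop over extensions.extensions() above picks it up.
# A minimal, hypothetical handler could look like:
#
#     def debuginstall(ui, fm):
#         fm.plain(b'checking myext prerequisites...\n')
#         return 0  # no problems detected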
1739 1739
1740 1740 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1741 1741 def debugknown(ui, repopath, *ids, **opts):
1742 1742 """test whether node ids are known to a repo
1743 1743
1744 1744 Every ID must be a full-length hex node id string. Returns a list of 0s
1745 1745 and 1s indicating unknown/known.
1746 1746 """
1747 1747 opts = pycompat.byteskwargs(opts)
1748 1748 repo = hg.peer(ui, opts, repopath)
1749 1749 if not repo.capable(b'known'):
1750 1750 raise error.Abort(b"known() not supported by target repository")
1751 1751 flags = repo.known([bin(s) for s in ids])
1752 1752 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1753 1753
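# Usage sketch (peer URL, node id and output are illustrative): ``hg debugknown``
# takes a peer location plus full 40-hex node ids and prints one 0/1 flag per id:
#
#     $ hg debugknown ssh://example.com/repo 0123456789abcdef0123456789abcdef01234567
#     0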
1754 1754
1755 1755 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1756 1756 def debuglabelcomplete(ui, repo, *args):
1757 1757 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1758 1758 debugnamecomplete(ui, repo, *args)
1759 1759
1760 1760
1761 1761 @command(
1762 1762 b'debuglocks',
1763 1763 [
1764 1764 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1765 1765 (
1766 1766 b'W',
1767 1767 b'force-wlock',
1768 1768 None,
1769 1769 _(b'free the working state lock (DANGEROUS)'),
1770 1770 ),
1771 1771 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1772 1772 (
1773 1773 b'S',
1774 1774 b'set-wlock',
1775 1775 None,
1776 1776 _(b'set the working state lock until stopped'),
1777 1777 ),
1778 1778 ],
1779 1779 _(b'[OPTION]...'),
1780 1780 )
1781 1781 def debuglocks(ui, repo, **opts):
1782 1782 """show or modify state of locks
1783 1783
1784 1784 By default, this command will show which locks are held. This
1785 1785 includes the user and process holding the lock, the amount of time
1786 1786 the lock has been held, and the machine name where the process is
1787 1787 running if it's not local.
1788 1788
1789 1789 Locks protect the integrity of Mercurial's data, so should be
1790 1790 treated with care. System crashes or other interruptions may cause
1791 1791 locks to not be properly released, though Mercurial will usually
1792 1792 detect and remove such stale locks automatically.
1793 1793
1794 1794 However, detecting stale locks may not always be possible (for
1795 1795 instance, on a shared filesystem). Removing locks may also be
1796 1796 blocked by filesystem permissions.
1797 1797
1798 1798 Setting a lock will prevent other commands from changing the data.
1799 1799 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1800 1800 The set locks are removed when the command exits.
1801 1801
1802 1802 Returns 0 if no locks are held.
1803 1803
1804 1804 """
1805 1805
1806 1806 if opts.get('force_lock'):
1807 1807 repo.svfs.unlink(b'lock')
1808 1808 if opts.get('force_wlock'):
1809 1809 repo.vfs.unlink(b'wlock')
1810 1810 if opts.get('force_lock') or opts.get('force_wlock'):
1811 1811 return 0
1812 1812
1813 1813 locks = []
1814 1814 try:
1815 1815 if opts.get('set_wlock'):
1816 1816 try:
1817 1817 locks.append(repo.wlock(False))
1818 1818 except error.LockHeld:
1819 1819 raise error.Abort(_(b'wlock is already held'))
1820 1820 if opts.get('set_lock'):
1821 1821 try:
1822 1822 locks.append(repo.lock(False))
1823 1823 except error.LockHeld:
1824 1824 raise error.Abort(_(b'lock is already held'))
1825 1825 if len(locks):
1826 1826 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1827 1827 return 0
1828 1828 finally:
1829 1829 release(*locks)
1830 1830
1831 1831 now = time.time()
1832 1832 held = 0
1833 1833
1834 1834 def report(vfs, name, method):
1835 1835 # this causes stale locks to get reaped for more accurate reporting
1836 1836 try:
1837 1837 l = method(False)
1838 1838 except error.LockHeld:
1839 1839 l = None
1840 1840
1841 1841 if l:
1842 1842 l.release()
1843 1843 else:
1844 1844 try:
1845 1845 st = vfs.lstat(name)
1846 1846 age = now - st[stat.ST_MTIME]
1847 1847 user = util.username(st.st_uid)
1848 1848 locker = vfs.readlock(name)
1849 1849 if b":" in locker:
1850 1850 host, pid = locker.split(b':')
1851 1851 if host == socket.gethostname():
1852 1852 locker = b'user %s, process %s' % (user or b'None', pid)
1853 1853 else:
1854 1854 locker = b'user %s, process %s, host %s' % (
1855 1855 user or b'None',
1856 1856 pid,
1857 1857 host,
1858 1858 )
1859 1859 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1860 1860 return 1
1861 1861 except OSError as e:
1862 1862 if e.errno != errno.ENOENT:
1863 1863 raise
1864 1864
1865 1865 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1866 1866 return 0
1867 1867
1868 1868 held += report(repo.svfs, b"lock", repo.lock)
1869 1869 held += report(repo.vfs, b"wlock", repo.wlock)
1870 1870
1871 1871 return held
1872 1872
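# Output sketch (illustrative values): with no options, report() above prints
# one line per lock, e.g.:
#
#     lock:  free
#     wlock: user alice, process 12345 (3s)
#
# and the command's return value is the number of locks currently held.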
1873 1873
1874 1874 @command(
1875 1875 b'debugmanifestfulltextcache',
1876 1876 [
1877 1877 (b'', b'clear', False, _(b'clear the cache')),
1878 1878 (
1879 1879 b'a',
1880 1880 b'add',
1881 1881 [],
1882 1882 _(b'add the given manifest nodes to the cache'),
1883 1883 _(b'NODE'),
1884 1884 ),
1885 1885 ],
1886 1886 b'',
1887 1887 )
1888 1888 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1889 1889 """show, clear or amend the contents of the manifest fulltext cache"""
1890 1890
1891 1891 def getcache():
1892 1892 r = repo.manifestlog.getstorage(b'')
1893 1893 try:
1894 1894 return r._fulltextcache
1895 1895 except AttributeError:
1896 1896 msg = _(
1897 1897 b"Current revlog implementation doesn't appear to have a "
1898 1898 b"manifest fulltext cache\n"
1899 1899 )
1900 1900 raise error.Abort(msg)
1901 1901
1902 1902 if opts.get('clear'):
1903 1903 with repo.wlock():
1904 1904 cache = getcache()
1905 1905 cache.clear(clear_persisted_data=True)
1906 1906 return
1907 1907
1908 1908 if add:
1909 1909 with repo.wlock():
1910 1910 m = repo.manifestlog
1911 1911 store = m.getstorage(b'')
1912 1912 for n in add:
1913 1913 try:
1914 1914 manifest = m[store.lookup(n)]
1915 1915 except error.LookupError as e:
1916 1916 raise error.Abort(e, hint=b"Check your manifest node id")
1917 1917 manifest.read() # stores revision in cache too
1918 1918 return
1919 1919
1920 1920 cache = getcache()
1921 1921 if not len(cache):
1922 1922 ui.write(_(b'cache empty\n'))
1923 1923 else:
1924 1924 ui.write(
1925 1925 _(
1926 1926 b'cache contains %d manifest entries, in order of most to '
1927 1927 b'least recent:\n'
1928 1928 )
1929 1929 % (len(cache),)
1930 1930 )
1931 1931 totalsize = 0
1932 1932 for nodeid in cache:
1933 1933 # Use cache.peek so we do not update the LRU order
1934 1934 data = cache.peek(nodeid)
1935 1935 size = len(data)
1936 1936 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1937 1937 ui.write(
1938 1938 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1939 1939 )
1940 1940 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1941 1941 ui.write(
1942 1942 _(b'total cache data size %s, on-disk %s\n')
1943 1943 % (util.bytecount(totalsize), util.bytecount(ondisk))
1944 1944 )
1945 1945
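# Note on the size accounting above: each cache entry is charged its fulltext
# length plus 24 bytes of overhead (20-byte nodeid + 4-byte size field), so a
# 1000-byte manifest contributes 1024 bytes to the reported total cache size.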
1946 1946
1947 1947 @command(b'debugmergestate', [], b'')
1948 1948 def debugmergestate(ui, repo, *args):
1949 1949 """print merge state
1950 1950
1951 1951 Use --verbose to print out information about whether v1 or v2 merge state
1952 1952 was chosen."""
1953 1953
1954 1954 def _hashornull(h):
1955 1955 if h == nullhex:
1956 1956 return b'null'
1957 1957 else:
1958 1958 return h
1959 1959
1960 1960 def printrecords(version):
1961 1961 ui.writenoi18n(b'* version %d records\n' % version)
1962 1962 if version == 1:
1963 1963 records = v1records
1964 1964 else:
1965 1965 records = v2records
1966 1966
1967 1967 for rtype, record in records:
1968 1968 # pretty print some record types
1969 1969 if rtype == b'L':
1970 1970 ui.writenoi18n(b'local: %s\n' % record)
1971 1971 elif rtype == b'O':
1972 1972 ui.writenoi18n(b'other: %s\n' % record)
1973 1973 elif rtype == b'm':
1974 1974 driver, mdstate = record.split(b'\0', 1)
1975 1975 ui.writenoi18n(
1976 1976 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1977 1977 )
1978 1978 elif rtype in b'FDC':
1979 1979 r = record.split(b'\0')
1980 1980 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1981 1981 if version == 1:
1982 1982 onode = b'not stored in v1 format'
1983 1983 flags = r[7]
1984 1984 else:
1985 1985 onode, flags = r[7:9]
1986 1986 ui.writenoi18n(
1987 1987 b'file: %s (record type "%s", state "%s", hash %s)\n'
1988 1988 % (f, rtype, state, _hashornull(hash))
1989 1989 )
1990 1990 ui.writenoi18n(
1991 1991 b' local path: %s (flags "%s")\n' % (lfile, flags)
1992 1992 )
1993 1993 ui.writenoi18n(
1994 1994 b' ancestor path: %s (node %s)\n'
1995 1995 % (afile, _hashornull(anode))
1996 1996 )
1997 1997 ui.writenoi18n(
1998 1998 b' other path: %s (node %s)\n'
1999 1999 % (ofile, _hashornull(onode))
2000 2000 )
2001 2001 elif rtype == b'f':
2002 2002 filename, rawextras = record.split(b'\0', 1)
2003 2003 extras = rawextras.split(b'\0')
2004 2004 i = 0
2005 2005 extrastrings = []
2006 2006 while i < len(extras):
2007 2007 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
2008 2008 i += 2
2009 2009
2010 2010 ui.writenoi18n(
2011 2011 b'file extras: %s (%s)\n'
2012 2012 % (filename, b', '.join(extrastrings))
2013 2013 )
2014 2014 elif rtype == b'l':
2015 2015 labels = record.split(b'\0', 2)
2016 2016 labels = [l for l in labels if len(l) > 0]
2017 2017 ui.writenoi18n(b'labels:\n')
2018 2018 ui.write((b' local: %s\n' % labels[0]))
2019 2019 ui.write((b' other: %s\n' % labels[1]))
2020 2020 if len(labels) > 2:
2021 2021 ui.write((b' base: %s\n' % labels[2]))
2022 2022 else:
2023 2023 ui.writenoi18n(
2024 2024 b'unrecognized entry: %s\t%s\n'
2025 2025 % (rtype, record.replace(b'\0', b'\t'))
2026 2026 )
2027 2027
2028 2028 # Avoid mergestate.read() since it may raise an exception for unsupported
2029 2029 # merge state records. We shouldn't be doing this, but this is OK since this
2030 2030 # command is pretty low-level.
2031 2031 ms = mergemod.mergestate(repo)
2032 2032
2033 2033 # sort so that reasonable information is on top
2034 2034 v1records = ms._readrecordsv1()
2035 2035 v2records = ms._readrecordsv2()
2036 2036 order = b'LOml'
2037 2037
2038 2038 def key(r):
2039 2039 idx = order.find(r[0])
2040 2040 if idx == -1:
2041 2041 return (1, r[1])
2042 2042 else:
2043 2043 return (0, idx)
2044 2044
2045 2045 v1records.sort(key=key)
2046 2046 v2records.sort(key=key)
2047 2047
2048 2048 if not v1records and not v2records:
2049 2049 ui.writenoi18n(b'no merge state found\n')
2050 2050 elif not v2records:
2051 2051 ui.notenoi18n(b'no version 2 merge state\n')
2052 2052 printrecords(1)
2053 2053 elif ms._v1v2match(v1records, v2records):
2054 2054 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2055 2055 printrecords(2)
2056 2056 else:
2057 2057 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2058 2058 printrecords(1)
2059 2059 if ui.verbose:
2060 2060 printrecords(2)
2061 2061
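# Reference for the record types pretty-printed by printrecords above:
# 'L' local node, 'O' other node, 'm' merge driver state, 'F'/'D'/'C' per-file
# merge records, 'f' per-file extras, 'l' merge labels; anything else is shown
# as an "unrecognized entry".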
2062 2062
2063 2063 @command(b'debugnamecomplete', [], _(b'NAME...'))
2064 2064 def debugnamecomplete(ui, repo, *args):
2065 2065 '''complete "names" - tags, open branch names, bookmark names'''
2066 2066
2067 2067 names = set()
2068 2068 # since we previously only listed open branches, we will handle that
2069 2069 # specially (after this for loop)
2070 2070 for name, ns in pycompat.iteritems(repo.names):
2071 2071 if name != b'branches':
2072 2072 names.update(ns.listnames(repo))
2073 2073 names.update(
2074 2074 tag
2075 2075 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2076 2076 if not closed
2077 2077 )
2078 2078 completions = set()
2079 2079 if not args:
2080 2080 args = [b'']
2081 2081 for a in args:
2082 2082 completions.update(n for n in names if n.startswith(a))
2083 2083 ui.write(b'\n'.join(sorted(completions)))
2084 2084 ui.write(b'\n')
2085 2085
2086 2086
2087 2087 @command(
2088 2088 b'debugnodemap',
2089 2089 [
2090 2090 (
2091 2091 b'',
2092 2092 b'dump-new',
2093 2093 False,
2094 2094 _(b'write a (new) persistent binary nodemap to stdout'),
2095 2095 ),
2096 2096 (b'', b'dump-disk', False, _(b'dump on-disk data to stdout')),
2097 2097 (
2098 2098 b'',
2099 2099 b'check',
2100 2100 False,
2101 2101 _(b'check that the on-disk data are correct.'),
2102 2102 ),
2103 (
2104 b'',
2105 b'metadata',
2106 False,
2107 _(b'display the on-disk metadata for the nodemap'),
2108 ),
2103 2109 ],
2104 2110 )
2105 2111 def debugnodemap(ui, repo, **opts):
2106 2112 """write and inspect on disk nodemap
2107 2113 """
2108 2114 if opts['dump_new']:
2109 2115 unfi = repo.unfiltered()
2110 2116 cl = unfi.changelog
2111 2117 data = nodemap.persistent_data(cl.index)
2112 2118 ui.write(data)
2113 2119 elif opts['dump_disk']:
2114 2120 unfi = repo.unfiltered()
2115 2121 cl = unfi.changelog
2116 2122 nm_data = nodemap.persisted_data(cl)
2117 2123 if nm_data is not None:
2118 2124 docket, data = nm_data
2119 2125 ui.write(data)
2120 2126 elif opts['check']:
2121 2127 unfi = repo.unfiltered()
2122 2128 cl = unfi.changelog
2123 2129 nm_data = nodemap.persisted_data(cl)
2124 2130 if nm_data is not None:
2125 2131 docket, data = nm_data
2126 2132 return nodemap.check_data(ui, cl.index, data)
2133 elif opts['metadata']:
2134 unfi = repo.unfiltered()
2135 cl = unfi.changelog
2136 nm_data = nodemap.persisted_data(cl)
2137 if nm_data is not None:
2138 docket, data = nm_data
2139 ui.write((b"uid: %s\n") % docket.uid)
2127 2140
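# Usage sketch for the new --metadata flag (the uid value is illustrative; it
# is whatever docket.uid happens to be for the repository):
#
#     $ hg debugnodemap --metadata
#     uid: 87d0373b
#
# Nothing is printed when no persistent nodemap data exists on disk.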
2128 2141
2129 2142 @command(
2130 2143 b'debugobsolete',
2131 2144 [
2132 2145 (b'', b'flags', 0, _(b'markers flag')),
2133 2146 (
2134 2147 b'',
2135 2148 b'record-parents',
2136 2149 False,
2137 2150 _(b'record parent information for the precursor'),
2138 2151 ),
2139 2152 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2140 2153 (
2141 2154 b'',
2142 2155 b'exclusive',
2143 2156 False,
2144 2157 _(b'restrict display to markers only relevant to REV'),
2145 2158 ),
2146 2159 (b'', b'index', False, _(b'display index of the marker')),
2147 2160 (b'', b'delete', [], _(b'delete markers specified by indices')),
2148 2161 ]
2149 2162 + cmdutil.commitopts2
2150 2163 + cmdutil.formatteropts,
2151 2164 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2152 2165 )
2153 2166 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2154 2167 """create arbitrary obsolete marker
2155 2168
2156 2169 With no arguments, displays the list of obsolescence markers."""
2157 2170
2158 2171 opts = pycompat.byteskwargs(opts)
2159 2172
2160 2173 def parsenodeid(s):
2161 2174 try:
2162 2175 # We do not use revsingle/revrange functions here to accept
2163 2176 # arbitrary node identifiers, possibly not present in the
2164 2177 # local repository.
2165 2178 n = bin(s)
2166 2179 if len(n) != len(nullid):
2167 2180 raise TypeError()
2168 2181 return n
2169 2182 except TypeError:
2170 2183 raise error.Abort(
2171 2184 b'changeset references must be full hexadecimal '
2172 2185 b'node identifiers'
2173 2186 )
2174 2187
2175 2188 if opts.get(b'delete'):
2176 2189 indices = []
2177 2190 for v in opts.get(b'delete'):
2178 2191 try:
2179 2192 indices.append(int(v))
2180 2193 except ValueError:
2181 2194 raise error.Abort(
2182 2195 _(b'invalid index value: %r') % v,
2183 2196 hint=_(b'use integers for indices'),
2184 2197 )
2185 2198
2186 2199 if repo.currenttransaction():
2187 2200 raise error.Abort(
2188 2201 _(b'cannot delete obsmarkers in the middle of a transaction.')
2189 2202 )
2190 2203
2191 2204 with repo.lock():
2192 2205 n = repair.deleteobsmarkers(repo.obsstore, indices)
2193 2206 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2194 2207
2195 2208 return
2196 2209
2197 2210 if precursor is not None:
2198 2211 if opts[b'rev']:
2199 2212 raise error.Abort(b'cannot select revision when creating marker')
2200 2213 metadata = {}
2201 2214 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2202 2215 succs = tuple(parsenodeid(succ) for succ in successors)
2203 2216 l = repo.lock()
2204 2217 try:
2205 2218 tr = repo.transaction(b'debugobsolete')
2206 2219 try:
2207 2220 date = opts.get(b'date')
2208 2221 if date:
2209 2222 date = dateutil.parsedate(date)
2210 2223 else:
2211 2224 date = None
2212 2225 prec = parsenodeid(precursor)
2213 2226 parents = None
2214 2227 if opts[b'record_parents']:
2215 2228 if prec not in repo.unfiltered():
2216 2229 raise error.Abort(
2217 2230 b'cannot use --record-parents on '
2218 2231 b'unknown changesets'
2219 2232 )
2220 2233 parents = repo.unfiltered()[prec].parents()
2221 2234 parents = tuple(p.node() for p in parents)
2222 2235 repo.obsstore.create(
2223 2236 tr,
2224 2237 prec,
2225 2238 succs,
2226 2239 opts[b'flags'],
2227 2240 parents=parents,
2228 2241 date=date,
2229 2242 metadata=metadata,
2230 2243 ui=ui,
2231 2244 )
2232 2245 tr.close()
2233 2246 except ValueError as exc:
2234 2247 raise error.Abort(
2235 2248 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2236 2249 )
2237 2250 finally:
2238 2251 tr.release()
2239 2252 finally:
2240 2253 l.release()
2241 2254 else:
2242 2255 if opts[b'rev']:
2243 2256 revs = scmutil.revrange(repo, opts[b'rev'])
2244 2257 nodes = [repo[r].node() for r in revs]
2245 2258 markers = list(
2246 2259 obsutil.getmarkers(
2247 2260 repo, nodes=nodes, exclusive=opts[b'exclusive']
2248 2261 )
2249 2262 )
2250 2263 markers.sort(key=lambda x: x._data)
2251 2264 else:
2252 2265 markers = obsutil.getmarkers(repo)
2253 2266
2254 2267 markerstoiter = markers
2255 2268 isrelevant = lambda m: True
2256 2269 if opts.get(b'rev') and opts.get(b'index'):
2257 2270 markerstoiter = obsutil.getmarkers(repo)
2258 2271 markerset = set(markers)
2259 2272 isrelevant = lambda m: m in markerset
2260 2273
2261 2274 fm = ui.formatter(b'debugobsolete', opts)
2262 2275 for i, m in enumerate(markerstoiter):
2263 2276 if not isrelevant(m):
2264 2277 # marker can be irrelevant when we're iterating over a set
2265 2278 # of markers (markerstoiter) which is bigger than the set
2266 2279 # of markers we want to display (markers)
2267 2280 # this can happen if both --index and --rev options are
2268 2281 # provided and thus we need to iterate over all of the markers
2269 2282 # to get the correct indices, but only display the ones that
2270 2283 # are relevant to --rev value
2271 2284 continue
2272 2285 fm.startitem()
2273 2286 ind = i if opts.get(b'index') else None
2274 2287 cmdutil.showmarker(fm, m, index=ind)
2275 2288 fm.end()
2276 2289
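# Usage sketch (node ids are placeholders): a marker can be created from two
# full hex node ids, e.g.
#
#     $ hg debugobsolete <old-full-hex-node> <new-full-hex-node>
#
# and --record-parents additionally stores the precursor's parents, which only
# works when the precursor is known to the local (unfiltered) repository.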
2277 2290
2278 2291 @command(
2279 2292 b'debugp1copies',
2280 2293 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2281 2294 _(b'[-r REV]'),
2282 2295 )
2283 2296 def debugp1copies(ui, repo, **opts):
2284 2297 """dump copy information compared to p1"""
2285 2298
2286 2299 opts = pycompat.byteskwargs(opts)
2287 2300 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2288 2301 for dst, src in ctx.p1copies().items():
2289 2302 ui.write(b'%s -> %s\n' % (src, dst))
2290 2303
2291 2304
2292 2305 @command(
2293 2306 b'debugp2copies',
2294 2307 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2295 2308 _(b'[-r REV]'),
2296 2309 )
2297 2310 def debugp2copies(ui, repo, **opts):
2298 2311 """dump copy information compared to p2"""
2299 2312
2300 2313 opts = pycompat.byteskwargs(opts)
2301 2314 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2302 2315 for dst, src in ctx.p2copies().items():
2303 2316 ui.write(b'%s -> %s\n' % (src, dst))
2304 2317
2305 2318
2306 2319 @command(
2307 2320 b'debugpathcomplete',
2308 2321 [
2309 2322 (b'f', b'full', None, _(b'complete an entire path')),
2310 2323 (b'n', b'normal', None, _(b'show only normal files')),
2311 2324 (b'a', b'added', None, _(b'show only added files')),
2312 2325 (b'r', b'removed', None, _(b'show only removed files')),
2313 2326 ],
2314 2327 _(b'FILESPEC...'),
2315 2328 )
2316 2329 def debugpathcomplete(ui, repo, *specs, **opts):
2317 2330 '''complete part or all of a tracked path
2318 2331
2319 2332 This command supports shells that offer path name completion. It
2320 2333 currently completes only files already known to the dirstate.
2321 2334
2322 2335 Completion extends only to the next path segment unless
2323 2336 --full is specified, in which case entire paths are used.'''
2324 2337
2325 2338 def complete(path, acceptable):
2326 2339 dirstate = repo.dirstate
2327 2340 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2328 2341 rootdir = repo.root + pycompat.ossep
2329 2342 if spec != repo.root and not spec.startswith(rootdir):
2330 2343 return [], []
2331 2344 if os.path.isdir(spec):
2332 2345 spec += b'/'
2333 2346 spec = spec[len(rootdir) :]
2334 2347 fixpaths = pycompat.ossep != b'/'
2335 2348 if fixpaths:
2336 2349 spec = spec.replace(pycompat.ossep, b'/')
2337 2350 speclen = len(spec)
2338 2351 fullpaths = opts['full']
2339 2352 files, dirs = set(), set()
2340 2353 adddir, addfile = dirs.add, files.add
2341 2354 for f, st in pycompat.iteritems(dirstate):
2342 2355 if f.startswith(spec) and st[0] in acceptable:
2343 2356 if fixpaths:
2344 2357 f = f.replace(b'/', pycompat.ossep)
2345 2358 if fullpaths:
2346 2359 addfile(f)
2347 2360 continue
2348 2361 s = f.find(pycompat.ossep, speclen)
2349 2362 if s >= 0:
2350 2363 adddir(f[:s])
2351 2364 else:
2352 2365 addfile(f)
2353 2366 return files, dirs
2354 2367
2355 2368 acceptable = b''
2356 2369 if opts['normal']:
2357 2370 acceptable += b'nm'
2358 2371 if opts['added']:
2359 2372 acceptable += b'a'
2360 2373 if opts['removed']:
2361 2374 acceptable += b'r'
2362 2375 cwd = repo.getcwd()
2363 2376 if not specs:
2364 2377 specs = [b'.']
2365 2378
2366 2379 files, dirs = set(), set()
2367 2380 for spec in specs:
2368 2381 f, d = complete(spec, acceptable or b'nmar')
2369 2382 files.update(f)
2370 2383 dirs.update(d)
2371 2384 files.update(dirs)
2372 2385 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2373 2386 ui.write(b'\n')
2374 2387
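# Note on the ``acceptable`` filter built in debugpathcomplete above: it is a
# string of dirstate state codes ('n' normal, 'm' needs merging, 'a' added,
# 'r' removed); --normal maps to 'nm', --added to 'a', --removed to 'r', and
# with no filter option every state in 'nmar' is accepted.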
2375 2388
2376 2389 @command(
2377 2390 b'debugpathcopies',
2378 2391 cmdutil.walkopts,
2379 2392 b'hg debugpathcopies REV1 REV2 [FILE]',
2380 2393 inferrepo=True,
2381 2394 )
2382 2395 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2383 2396 """show copies between two revisions"""
2384 2397 ctx1 = scmutil.revsingle(repo, rev1)
2385 2398 ctx2 = scmutil.revsingle(repo, rev2)
2386 2399 m = scmutil.match(ctx1, pats, opts)
2387 2400 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2388 2401 ui.write(b'%s -> %s\n' % (src, dst))
2389 2402
2390 2403
2391 2404 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2392 2405 def debugpeer(ui, path):
2393 2406 """establish a connection to a peer repository"""
2394 2407 # Always enable peer request logging. Requires --debug to display
2395 2408 # though.
2396 2409 overrides = {
2397 2410 (b'devel', b'debug.peer-request'): True,
2398 2411 }
2399 2412
2400 2413 with ui.configoverride(overrides):
2401 2414 peer = hg.peer(ui, {}, path)
2402 2415
2403 2416 local = peer.local() is not None
2404 2417 canpush = peer.canpush()
2405 2418
2406 2419 ui.write(_(b'url: %s\n') % peer.url())
2407 2420 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2408 2421 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2409 2422
2410 2423
2411 2424 @command(
2412 2425 b'debugpickmergetool',
2413 2426 [
2414 2427 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2415 2428 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2416 2429 ]
2417 2430 + cmdutil.walkopts
2418 2431 + cmdutil.mergetoolopts,
2419 2432 _(b'[PATTERN]...'),
2420 2433 inferrepo=True,
2421 2434 )
2422 2435 def debugpickmergetool(ui, repo, *pats, **opts):
2423 2436 """examine which merge tool is chosen for specified file
2424 2437
2425 2438 As described in :hg:`help merge-tools`, Mercurial examines
2426 2439 configurations below in this order to decide which merge tool is
2427 2440 chosen for the specified file.
2428 2441
2429 2442 1. ``--tool`` option
2430 2443 2. ``HGMERGE`` environment variable
2431 2444 3. configurations in ``merge-patterns`` section
2432 2445 4. configuration of ``ui.merge``
2433 2446 5. configurations in ``merge-tools`` section
2434 2447 6. ``hgmerge`` tool (for historical reasons only)
2435 2448 7. default tool for fallback (``:merge`` or ``:prompt``)
2436 2449
2437 2450 This command writes out examination result in the style below::
2438 2451
2439 2452 FILE = MERGETOOL
2440 2453
2441 2454 By default, all files known in the first parent context of the
2442 2455 working directory are examined. Use file patterns and/or -I/-X
2443 2456 options to limit target files. -r/--rev is also useful to examine
2444 2457 files in another context without actually updating to it.
2445 2458
2446 2459 With --debug, this command shows warning messages while matching
2447 2460 against ``merge-patterns`` and so on, too. It is recommended to
2448 2461 use this option with explicit file patterns and/or -I/-X options,
2449 2462 because this option increases the amount of output per file according
2450 2463 to configurations in hgrc.
2451 2464
2452 2465 With -v/--verbose, this command first shows the configurations
2453 2466 below (only if specified).
2454 2467
2455 2468 - ``--tool`` option
2456 2469 - ``HGMERGE`` environment variable
2457 2470 - configuration of ``ui.merge``
2458 2471
2459 2472 If the merge tool is chosen before matching against
2460 2473 ``merge-patterns``, this command can't show any helpful
2461 2474 information, even with --debug. In such a case, the information
2462 2475 above is useful for understanding why a merge tool was chosen.
2463 2476 """
2464 2477 opts = pycompat.byteskwargs(opts)
2465 2478 overrides = {}
2466 2479 if opts[b'tool']:
2467 2480 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2468 2481 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2469 2482
2470 2483 with ui.configoverride(overrides, b'debugmergepatterns'):
2471 2484 hgmerge = encoding.environ.get(b"HGMERGE")
2472 2485 if hgmerge is not None:
2473 2486 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2474 2487 uimerge = ui.config(b"ui", b"merge")
2475 2488 if uimerge:
2476 2489 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2477 2490
2478 2491 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2479 2492 m = scmutil.match(ctx, pats, opts)
2480 2493 changedelete = opts[b'changedelete']
2481 2494 for path in ctx.walk(m):
2482 2495 fctx = ctx[path]
2483 2496 try:
2484 2497 if not ui.debugflag:
2485 2498 ui.pushbuffer(error=True)
2486 2499 tool, toolpath = filemerge._picktool(
2487 2500 repo,
2488 2501 ui,
2489 2502 path,
2490 2503 fctx.isbinary(),
2491 2504 b'l' in fctx.flags(),
2492 2505 changedelete,
2493 2506 )
2494 2507 finally:
2495 2508 if not ui.debugflag:
2496 2509 ui.popbuffer()
2497 2510 ui.write(b'%s = %s\n' % (path, tool))
2498 2511
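# Usage sketch (file name and tool are illustrative): each examined file is
# printed in the "FILE = MERGETOOL" style described in the docstring, e.g.
#
#     $ hg debugpickmergetool -r . --tool :merge3 foo.c
#     foo.c = :merge3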
2499 2512
2500 2513 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2501 2514 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2502 2515 '''access the pushkey key/value protocol
2503 2516
2504 2517 With two args, list the keys in the given namespace.
2505 2518
2506 2519 With five args, set a key to new if it currently is set to old.
2507 2520 Reports success or failure.
2508 2521 '''
2509 2522
2510 2523 target = hg.peer(ui, {}, repopath)
2511 2524 if keyinfo:
2512 2525 key, old, new = keyinfo
2513 2526 with target.commandexecutor() as e:
2514 2527 r = e.callcommand(
2515 2528 b'pushkey',
2516 2529 {
2517 2530 b'namespace': namespace,
2518 2531 b'key': key,
2519 2532 b'old': old,
2520 2533 b'new': new,
2521 2534 },
2522 2535 ).result()
2523 2536
2524 2537 ui.status(pycompat.bytestr(r) + b'\n')
2525 2538 return not r
2526 2539 else:
2527 2540 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2528 2541 ui.write(
2529 2542 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2530 2543 )
2531 2544
2532 2545
2533 2546 @command(b'debugpvec', [], _(b'A B'))
2534 2547 def debugpvec(ui, repo, a, b=None):
2535 2548 ca = scmutil.revsingle(repo, a)
2536 2549 cb = scmutil.revsingle(repo, b)
2537 2550 pa = pvec.ctxpvec(ca)
2538 2551 pb = pvec.ctxpvec(cb)
2539 2552 if pa == pb:
2540 2553 rel = b"="
2541 2554 elif pa > pb:
2542 2555 rel = b">"
2543 2556 elif pa < pb:
2544 2557 rel = b"<"
2545 2558 elif pa | pb:
2546 2559 rel = b"|"
2547 2560 ui.write(_(b"a: %s\n") % pa)
2548 2561 ui.write(_(b"b: %s\n") % pb)
2549 2562 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2550 2563 ui.write(
2551 2564 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2552 2565 % (
2553 2566 abs(pa._depth - pb._depth),
2554 2567 pvec._hamming(pa._vec, pb._vec),
2555 2568 pa.distance(pb),
2556 2569 rel,
2557 2570 )
2558 2571 )
2559 2572
2560 2573
2561 2574 @command(
2562 2575 b'debugrebuilddirstate|debugrebuildstate',
2563 2576 [
2564 2577 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2565 2578 (
2566 2579 b'',
2567 2580 b'minimal',
2568 2581 None,
2569 2582 _(
2570 2583 b'only rebuild files that are inconsistent with '
2571 2584 b'the working copy parent'
2572 2585 ),
2573 2586 ),
2574 2587 ],
2575 2588 _(b'[-r REV]'),
2576 2589 )
2577 2590 def debugrebuilddirstate(ui, repo, rev, **opts):
2578 2591 """rebuild the dirstate as it would look like for the given revision
2579 2592
2580 2593 If no revision is specified the first current parent will be used.
2581 2594
2582 2595 The dirstate will be set to the files of the given revision.
2583 2596 The actual working directory content or existing dirstate
2584 2597 information such as adds or removes is not considered.
2585 2598
2586 2599 ``minimal`` will only rebuild the dirstate status for files that claim to be
2587 2600 tracked but are not in the parent manifest, or that exist in the parent
2588 2601 manifest but are not in the dirstate. It will not change adds, removes, or
2589 2602 modified files that are in the working copy parent.
2590 2603
2591 2604 One use of this command is to make the next :hg:`status` invocation
2592 2605 check the actual file content.
2593 2606 """
2594 2607 ctx = scmutil.revsingle(repo, rev)
2595 2608 with repo.wlock():
2596 2609 dirstate = repo.dirstate
2597 2610 changedfiles = None
2598 2611 # See command doc for what minimal does.
2599 2612 if opts.get('minimal'):
2600 2613 manifestfiles = set(ctx.manifest().keys())
2601 2614 dirstatefiles = set(dirstate)
2602 2615 manifestonly = manifestfiles - dirstatefiles
2603 2616 dsonly = dirstatefiles - manifestfiles
2604 2617 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2605 2618 changedfiles = manifestonly | dsnotadded
2606 2619
2607 2620 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2608 2621
2609 2622
2610 2623 @command(b'debugrebuildfncache', [], b'')
2611 2624 def debugrebuildfncache(ui, repo):
2612 2625 """rebuild the fncache file"""
2613 2626 repair.rebuildfncache(ui, repo)
2614 2627
2615 2628
2616 2629 @command(
2617 2630 b'debugrename',
2618 2631 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2619 2632 _(b'[-r REV] [FILE]...'),
2620 2633 )
2621 2634 def debugrename(ui, repo, *pats, **opts):
2622 2635 """dump rename information"""
2623 2636
2624 2637 opts = pycompat.byteskwargs(opts)
2625 2638 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2626 2639 m = scmutil.match(ctx, pats, opts)
2627 2640 for abs in ctx.walk(m):
2628 2641 fctx = ctx[abs]
2629 2642 o = fctx.filelog().renamed(fctx.filenode())
2630 2643 rel = repo.pathto(abs)
2631 2644 if o:
2632 2645 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2633 2646 else:
2634 2647 ui.write(_(b"%s not renamed\n") % rel)
2635 2648
2636 2649
2637 2650 @command(
2638 2651 b'debugrevlog',
2639 2652 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2640 2653 _(b'-c|-m|FILE'),
2641 2654 optionalrepo=True,
2642 2655 )
2643 2656 def debugrevlog(ui, repo, file_=None, **opts):
2644 2657 """show data and statistics about a revlog"""
2645 2658 opts = pycompat.byteskwargs(opts)
2646 2659 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2647 2660
2648 2661 if opts.get(b"dump"):
2649 2662 numrevs = len(r)
2650 2663 ui.write(
2651 2664 (
2652 2665 b"# rev p1rev p2rev start end deltastart base p1 p2"
2653 2666 b" rawsize totalsize compression heads chainlen\n"
2654 2667 )
2655 2668 )
2656 2669 ts = 0
2657 2670 heads = set()
2658 2671
2659 2672 for rev in pycompat.xrange(numrevs):
2660 2673 dbase = r.deltaparent(rev)
2661 2674 if dbase == -1:
2662 2675 dbase = rev
2663 2676 cbase = r.chainbase(rev)
2664 2677 clen = r.chainlen(rev)
2665 2678 p1, p2 = r.parentrevs(rev)
2666 2679 rs = r.rawsize(rev)
2667 2680 ts = ts + rs
2668 2681 heads -= set(r.parentrevs(rev))
2669 2682 heads.add(rev)
2670 2683 try:
2671 2684 compression = ts / r.end(rev)
2672 2685 except ZeroDivisionError:
2673 2686 compression = 0
2674 2687 ui.write(
2675 2688 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2676 2689 b"%11d %5d %8d\n"
2677 2690 % (
2678 2691 rev,
2679 2692 p1,
2680 2693 p2,
2681 2694 r.start(rev),
2682 2695 r.end(rev),
2683 2696 r.start(dbase),
2684 2697 r.start(cbase),
2685 2698 r.start(p1),
2686 2699 r.start(p2),
2687 2700 rs,
2688 2701 ts,
2689 2702 compression,
2690 2703 len(heads),
2691 2704 clen,
2692 2705 )
2693 2706 )
2694 2707 return 0
2695 2708
2696 2709 v = r.version
2697 2710 format = v & 0xFFFF
2698 2711 flags = []
2699 2712 gdelta = False
2700 2713 if v & revlog.FLAG_INLINE_DATA:
2701 2714 flags.append(b'inline')
2702 2715 if v & revlog.FLAG_GENERALDELTA:
2703 2716 gdelta = True
2704 2717 flags.append(b'generaldelta')
2705 2718 if not flags:
2706 2719 flags = [b'(none)']
2707 2720
2708 2721 ### tracks merge vs single parent
2709 2722 nummerges = 0
2710 2723
2711 2724 ### tracks ways the "delta" are build
2712 2725 # nodelta
2713 2726 numempty = 0
2714 2727 numemptytext = 0
2715 2728 numemptydelta = 0
2716 2729 # full file content
2717 2730 numfull = 0
2718 2731 # intermediate snapshot against a prior snapshot
2719 2732 numsemi = 0
2720 2733 # snapshot count per depth
2721 2734 numsnapdepth = collections.defaultdict(lambda: 0)
2722 2735 # delta against previous revision
2723 2736 numprev = 0
2724 2737 # delta against first or second parent (not prev)
2725 2738 nump1 = 0
2726 2739 nump2 = 0
2727 2740 # delta against neither prev nor parents
2728 2741 numother = 0
2729 2742 # delta against prev that are also first or second parent
2730 2743 # (details of `numprev`)
2731 2744 nump1prev = 0
2732 2745 nump2prev = 0
2733 2746
2734 2747 # data about delta chain of each revs
2735 2748 chainlengths = []
2736 2749 chainbases = []
2737 2750 chainspans = []
2738 2751
2739 2752 # data about each revision
2740 2753 datasize = [None, 0, 0]
2741 2754 fullsize = [None, 0, 0]
2742 2755 semisize = [None, 0, 0]
2743 2756 # snapshot count per depth
2744 2757 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2745 2758 deltasize = [None, 0, 0]
2746 2759 chunktypecounts = {}
2747 2760 chunktypesizes = {}
2748 2761
2749 2762 def addsize(size, l):
2750 2763 if l[0] is None or size < l[0]:
2751 2764 l[0] = size
2752 2765 if size > l[1]:
2753 2766 l[1] = size
2754 2767 l[2] += size
2755 2768
2756 2769 numrevs = len(r)
2757 2770 for rev in pycompat.xrange(numrevs):
2758 2771 p1, p2 = r.parentrevs(rev)
2759 2772 delta = r.deltaparent(rev)
2760 2773 if format > 0:
2761 2774 addsize(r.rawsize(rev), datasize)
2762 2775 if p2 != nullrev:
2763 2776 nummerges += 1
2764 2777 size = r.length(rev)
2765 2778 if delta == nullrev:
2766 2779 chainlengths.append(0)
2767 2780 chainbases.append(r.start(rev))
2768 2781 chainspans.append(size)
2769 2782 if size == 0:
2770 2783 numempty += 1
2771 2784 numemptytext += 1
2772 2785 else:
2773 2786 numfull += 1
2774 2787 numsnapdepth[0] += 1
2775 2788 addsize(size, fullsize)
2776 2789 addsize(size, snapsizedepth[0])
2777 2790 else:
2778 2791 chainlengths.append(chainlengths[delta] + 1)
2779 2792 baseaddr = chainbases[delta]
2780 2793 revaddr = r.start(rev)
2781 2794 chainbases.append(baseaddr)
2782 2795 chainspans.append((revaddr - baseaddr) + size)
2783 2796 if size == 0:
2784 2797 numempty += 1
2785 2798 numemptydelta += 1
2786 2799 elif r.issnapshot(rev):
2787 2800 addsize(size, semisize)
2788 2801 numsemi += 1
2789 2802 depth = r.snapshotdepth(rev)
2790 2803 numsnapdepth[depth] += 1
2791 2804 addsize(size, snapsizedepth[depth])
2792 2805 else:
2793 2806 addsize(size, deltasize)
2794 2807 if delta == rev - 1:
2795 2808 numprev += 1
2796 2809 if delta == p1:
2797 2810 nump1prev += 1
2798 2811 elif delta == p2:
2799 2812 nump2prev += 1
2800 2813 elif delta == p1:
2801 2814 nump1 += 1
2802 2815 elif delta == p2:
2803 2816 nump2 += 1
2804 2817 elif delta != nullrev:
2805 2818 numother += 1
2806 2819
2807 2820 # Obtain data on the raw chunks in the revlog.
2808 2821 if util.safehasattr(r, b'_getsegmentforrevs'):
2809 2822 segment = r._getsegmentforrevs(rev, rev)[1]
2810 2823 else:
2811 2824 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2812 2825 if segment:
2813 2826 chunktype = bytes(segment[0:1])
2814 2827 else:
2815 2828 chunktype = b'empty'
2816 2829
2817 2830 if chunktype not in chunktypecounts:
2818 2831 chunktypecounts[chunktype] = 0
2819 2832 chunktypesizes[chunktype] = 0
2820 2833
2821 2834 chunktypecounts[chunktype] += 1
2822 2835 chunktypesizes[chunktype] += size
2823 2836
2824 2837 # Adjust size min value for empty cases
2825 2838 for size in (datasize, fullsize, semisize, deltasize):
2826 2839 if size[0] is None:
2827 2840 size[0] = 0
2828 2841
2829 2842 numdeltas = numrevs - numfull - numempty - numsemi
2830 2843 numoprev = numprev - nump1prev - nump2prev
2831 2844 totalrawsize = datasize[2]
2832 2845 datasize[2] /= numrevs
2833 2846 fulltotal = fullsize[2]
2834 2847 if numfull == 0:
2835 2848 fullsize[2] = 0
2836 2849 else:
2837 2850 fullsize[2] /= numfull
2838 2851 semitotal = semisize[2]
2839 2852 snaptotal = {}
2840 2853 if numsemi > 0:
2841 2854 semisize[2] /= numsemi
2842 2855 for depth in snapsizedepth:
2843 2856 snaptotal[depth] = snapsizedepth[depth][2]
2844 2857 snapsizedepth[depth][2] /= numsnapdepth[depth]
2845 2858
2846 2859 deltatotal = deltasize[2]
2847 2860 if numdeltas > 0:
2848 2861 deltasize[2] /= numdeltas
2849 2862 totalsize = fulltotal + semitotal + deltatotal
2850 2863 avgchainlen = sum(chainlengths) / numrevs
2851 2864 maxchainlen = max(chainlengths)
2852 2865 maxchainspan = max(chainspans)
2853 2866 compratio = 1
2854 2867 if totalsize:
2855 2868 compratio = totalrawsize / totalsize
2856 2869
2857 2870 basedfmtstr = b'%%%dd\n'
2858 2871 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2859 2872
2860 2873 def dfmtstr(max):
2861 2874 return basedfmtstr % len(str(max))
2862 2875
2863 2876 def pcfmtstr(max, padding=0):
2864 2877 return basepcfmtstr % (len(str(max)), b' ' * padding)
2865 2878
2866 2879 def pcfmt(value, total):
2867 2880 if total:
2868 2881 return (value, 100 * float(value) / total)
2869 2882 else:
2870 2883 return value, 100.0
2871 2884
2872 2885 ui.writenoi18n(b'format : %d\n' % format)
2873 2886 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2874 2887
2875 2888 ui.write(b'\n')
2876 2889 fmt = pcfmtstr(totalsize)
2877 2890 fmt2 = dfmtstr(totalsize)
2878 2891 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2879 2892 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2880 2893 ui.writenoi18n(
2881 2894 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2882 2895 )
2883 2896 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2884 2897 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2885 2898 ui.writenoi18n(
2886 2899 b' text : '
2887 2900 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2888 2901 )
2889 2902 ui.writenoi18n(
2890 2903 b' delta : '
2891 2904 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2892 2905 )
2893 2906 ui.writenoi18n(
2894 2907 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2895 2908 )
2896 2909 for depth in sorted(numsnapdepth):
2897 2910 ui.write(
2898 2911 (b' lvl-%-3d : ' % depth)
2899 2912 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2900 2913 )
2901 2914 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2902 2915 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2903 2916 ui.writenoi18n(
2904 2917 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2905 2918 )
2906 2919 for depth in sorted(numsnapdepth):
2907 2920 ui.write(
2908 2921 (b' lvl-%-3d : ' % depth)
2909 2922 + fmt % pcfmt(snaptotal[depth], totalsize)
2910 2923 )
2911 2924 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2912 2925
2913 2926 def fmtchunktype(chunktype):
2914 2927 if chunktype == b'empty':
2915 2928 return b' %s : ' % chunktype
2916 2929 elif chunktype in pycompat.bytestr(string.ascii_letters):
2917 2930 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2918 2931 else:
2919 2932 return b' 0x%s : ' % hex(chunktype)
2920 2933
2921 2934 ui.write(b'\n')
2922 2935 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2923 2936 for chunktype in sorted(chunktypecounts):
2924 2937 ui.write(fmtchunktype(chunktype))
2925 2938 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2926 2939 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2927 2940 for chunktype in sorted(chunktypecounts):
2928 2941 ui.write(fmtchunktype(chunktype))
2929 2942 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2930 2943
2931 2944 ui.write(b'\n')
2932 2945 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2933 2946 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2934 2947 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2935 2948 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2936 2949 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2937 2950
2938 2951 if format > 0:
2939 2952 ui.write(b'\n')
2940 2953 ui.writenoi18n(
2941 2954 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2942 2955 % tuple(datasize)
2943 2956 )
2944 2957 ui.writenoi18n(
2945 2958 b'full revision size (min/max/avg) : %d / %d / %d\n'
2946 2959 % tuple(fullsize)
2947 2960 )
2948 2961 ui.writenoi18n(
2949 2962 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2950 2963 % tuple(semisize)
2951 2964 )
2952 2965 for depth in sorted(snapsizedepth):
2953 2966 if depth == 0:
2954 2967 continue
2955 2968 ui.writenoi18n(
2956 2969 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2957 2970 % ((depth,) + tuple(snapsizedepth[depth]))
2958 2971 )
2959 2972 ui.writenoi18n(
2960 2973 b'delta size (min/max/avg) : %d / %d / %d\n'
2961 2974 % tuple(deltasize)
2962 2975 )
2963 2976
2964 2977 if numdeltas > 0:
2965 2978 ui.write(b'\n')
2966 2979 fmt = pcfmtstr(numdeltas)
2967 2980 fmt2 = pcfmtstr(numdeltas, 4)
2968 2981 ui.writenoi18n(
2969 2982 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2970 2983 )
2971 2984 if numprev > 0:
2972 2985 ui.writenoi18n(
2973 2986 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2974 2987 )
2975 2988 ui.writenoi18n(
2976 2989 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2977 2990 )
2978 2991 ui.writenoi18n(
2979 2992 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2980 2993 )
2981 2994 if gdelta:
2982 2995 ui.writenoi18n(
2983 2996 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2984 2997 )
2985 2998 ui.writenoi18n(
2986 2999 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2987 3000 )
2988 3001 ui.writenoi18n(
2989 3002 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2990 3003 )
2991 3004
2992 3005
2993 3006 @command(
2994 3007 b'debugrevlogindex',
2995 3008 cmdutil.debugrevlogopts
2996 3009 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2997 3010 _(b'[-f FORMAT] -c|-m|FILE'),
2998 3011 optionalrepo=True,
2999 3012 )
3000 3013 def debugrevlogindex(ui, repo, file_=None, **opts):
3001 3014 """dump the contents of a revlog index"""
3002 3015 opts = pycompat.byteskwargs(opts)
3003 3016 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3004 3017 format = opts.get(b'format', 0)
3005 3018 if format not in (0, 1):
3006 3019 raise error.Abort(_(b"unknown format %d") % format)
3007 3020
3008 3021 if ui.debugflag:
3009 3022 shortfn = hex
3010 3023 else:
3011 3024 shortfn = short
3012 3025
3013 3026 # There might not be anything in r, so have a sane default
3014 3027 idlen = 12
3015 3028 for i in r:
3016 3029 idlen = len(shortfn(r.node(i)))
3017 3030 break
3018 3031
3019 3032 if format == 0:
3020 3033 if ui.verbose:
3021 3034 ui.writenoi18n(
3022 3035 b" rev offset length linkrev %s %s p2\n"
3023 3036 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3024 3037 )
3025 3038 else:
3026 3039 ui.writenoi18n(
3027 3040 b" rev linkrev %s %s p2\n"
3028 3041 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3029 3042 )
3030 3043 elif format == 1:
3031 3044 if ui.verbose:
3032 3045 ui.writenoi18n(
3033 3046 (
3034 3047 b" rev flag offset length size link p1"
3035 3048 b" p2 %s\n"
3036 3049 )
3037 3050 % b"nodeid".rjust(idlen)
3038 3051 )
3039 3052 else:
3040 3053 ui.writenoi18n(
3041 3054 b" rev flag size link p1 p2 %s\n"
3042 3055 % b"nodeid".rjust(idlen)
3043 3056 )
3044 3057
3045 3058 for i in r:
3046 3059 node = r.node(i)
3047 3060 if format == 0:
3048 3061 try:
3049 3062 pp = r.parents(node)
3050 3063 except Exception:
3051 3064 pp = [nullid, nullid]
3052 3065 if ui.verbose:
3053 3066 ui.write(
3054 3067 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3055 3068 % (
3056 3069 i,
3057 3070 r.start(i),
3058 3071 r.length(i),
3059 3072 r.linkrev(i),
3060 3073 shortfn(node),
3061 3074 shortfn(pp[0]),
3062 3075 shortfn(pp[1]),
3063 3076 )
3064 3077 )
3065 3078 else:
3066 3079 ui.write(
3067 3080 b"% 6d % 7d %s %s %s\n"
3068 3081 % (
3069 3082 i,
3070 3083 r.linkrev(i),
3071 3084 shortfn(node),
3072 3085 shortfn(pp[0]),
3073 3086 shortfn(pp[1]),
3074 3087 )
3075 3088 )
3076 3089 elif format == 1:
3077 3090 pr = r.parentrevs(i)
3078 3091 if ui.verbose:
3079 3092 ui.write(
3080 3093 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3081 3094 % (
3082 3095 i,
3083 3096 r.flags(i),
3084 3097 r.start(i),
3085 3098 r.length(i),
3086 3099 r.rawsize(i),
3087 3100 r.linkrev(i),
3088 3101 pr[0],
3089 3102 pr[1],
3090 3103 shortfn(node),
3091 3104 )
3092 3105 )
3093 3106 else:
3094 3107 ui.write(
3095 3108 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3096 3109 % (
3097 3110 i,
3098 3111 r.flags(i),
3099 3112 r.rawsize(i),
3100 3113 r.linkrev(i),
3101 3114 pr[0],
3102 3115 pr[1],
3103 3116 shortfn(node),
3104 3117 )
3105 3118 )
3106 3119
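# Note on the two layouts printed above: format 0 shows
# "rev [offset length] linkrev nodeid p1 p2" (offset/length only with
# --verbose), while format 1 shows
# "rev flag [offset length] size link p1 p2 nodeid"; parent hashes that cannot
# be resolved in format 0 fall back to the null id.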
3107 3120
3108 3121 @command(
3109 3122 b'debugrevspec',
3110 3123 [
3111 3124 (
3112 3125 b'',
3113 3126 b'optimize',
3114 3127 None,
3115 3128 _(b'print parsed tree after optimizing (DEPRECATED)'),
3116 3129 ),
3117 3130 (
3118 3131 b'',
3119 3132 b'show-revs',
3120 3133 True,
3121 3134 _(b'print list of result revisions (default)'),
3122 3135 ),
3123 3136 (
3124 3137 b's',
3125 3138 b'show-set',
3126 3139 None,
3127 3140 _(b'print internal representation of result set'),
3128 3141 ),
3129 3142 (
3130 3143 b'p',
3131 3144 b'show-stage',
3132 3145 [],
3133 3146 _(b'print parsed tree at the given stage'),
3134 3147 _(b'NAME'),
3135 3148 ),
3136 3149 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3137 3150 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3138 3151 ],
3139 3152 b'REVSPEC',
3140 3153 )
3141 3154 def debugrevspec(ui, repo, expr, **opts):
3142 3155 """parse and apply a revision specification
3143 3156
3144 3157 Use -p/--show-stage option to print the parsed tree at the given stages.
3145 3158 Use -p all to print tree at every stage.
3146 3159
3147 3160 Use --no-show-revs option with -s or -p to print only the set
3148 3161 representation or the parsed tree respectively.
3149 3162
3150 3163 Use --verify-optimized to compare the optimized result with the unoptimized
3151 3164 one. Returns 1 if the optimized result differs.
3152 3165 """
3153 3166 opts = pycompat.byteskwargs(opts)
3154 3167 aliases = ui.configitems(b'revsetalias')
3155 3168 stages = [
3156 3169 (b'parsed', lambda tree: tree),
3157 3170 (
3158 3171 b'expanded',
3159 3172 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3160 3173 ),
3161 3174 (b'concatenated', revsetlang.foldconcat),
3162 3175 (b'analyzed', revsetlang.analyze),
3163 3176 (b'optimized', revsetlang.optimize),
3164 3177 ]
3165 3178 if opts[b'no_optimized']:
3166 3179 stages = stages[:-1]
3167 3180 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3168 3181 raise error.Abort(
3169 3182 _(b'cannot use --verify-optimized with --no-optimized')
3170 3183 )
3171 3184 stagenames = set(n for n, f in stages)
3172 3185
3173 3186 showalways = set()
3174 3187 showchanged = set()
3175 3188 if ui.verbose and not opts[b'show_stage']:
3176 3189 # show parsed tree by --verbose (deprecated)
3177 3190 showalways.add(b'parsed')
3178 3191 showchanged.update([b'expanded', b'concatenated'])
3179 3192 if opts[b'optimize']:
3180 3193 showalways.add(b'optimized')
3181 3194 if opts[b'show_stage'] and opts[b'optimize']:
3182 3195 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3183 3196 if opts[b'show_stage'] == [b'all']:
3184 3197 showalways.update(stagenames)
3185 3198 else:
3186 3199 for n in opts[b'show_stage']:
3187 3200 if n not in stagenames:
3188 3201 raise error.Abort(_(b'invalid stage name: %s') % n)
3189 3202 showalways.update(opts[b'show_stage'])
3190 3203
3191 3204 treebystage = {}
3192 3205 printedtree = None
3193 3206 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3194 3207 for n, f in stages:
3195 3208 treebystage[n] = tree = f(tree)
3196 3209 if n in showalways or (n in showchanged and tree != printedtree):
3197 3210 if opts[b'show_stage'] or n != b'parsed':
3198 3211 ui.write(b"* %s:\n" % n)
3199 3212 ui.write(revsetlang.prettyformat(tree), b"\n")
3200 3213 printedtree = tree
3201 3214
3202 3215 if opts[b'verify_optimized']:
3203 3216 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3204 3217 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3205 3218 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3206 3219 ui.writenoi18n(
3207 3220 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3208 3221 )
3209 3222 ui.writenoi18n(
3210 3223 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3211 3224 )
3212 3225 arevs = list(arevs)
3213 3226 brevs = list(brevs)
3214 3227 if arevs == brevs:
3215 3228 return 0
3216 3229 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3217 3230 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3218 3231 sm = difflib.SequenceMatcher(None, arevs, brevs)
3219 3232 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3220 3233 if tag in ('delete', 'replace'):
3221 3234 for c in arevs[alo:ahi]:
3222 3235 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3223 3236 if tag in ('insert', 'replace'):
3224 3237 for c in brevs[blo:bhi]:
3225 3238 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3226 3239 if tag == 'equal':
3227 3240 for c in arevs[alo:ahi]:
3228 3241 ui.write(b' %d\n' % c)
3229 3242 return 1
3230 3243
3231 3244 func = revset.makematcher(tree)
3232 3245 revs = func(repo)
3233 3246 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3234 3247 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3235 3248 if not opts[b'show_revs']:
3236 3249 return
3237 3250 for c in revs:
3238 3251 ui.write(b"%d\n" % c)
3239 3252
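# Usage sketch (illustrative): the parse stages can be inspected with
# -p/--show-stage, e.g.
#
#     $ hg debugrevspec -p parsed -p optimized --no-show-revs 'tip or tip'
#
# which prints the tree after the 'parsed' and 'optimized' stages; the valid
# stage names are the ones listed in ``stages`` above ('parsed', 'expanded',
# 'concatenated', 'analyzed', 'optimized') plus the special value 'all'.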
3240 3253
3241 3254 @command(
3242 3255 b'debugserve',
3243 3256 [
3244 3257 (
3245 3258 b'',
3246 3259 b'sshstdio',
3247 3260 False,
3248 3261 _(b'run an SSH server bound to process handles'),
3249 3262 ),
3250 3263 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3251 3264 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3252 3265 ],
3253 3266 b'',
3254 3267 )
3255 3268 def debugserve(ui, repo, **opts):
3256 3269 """run a server with advanced settings
3257 3270
3258 3271 This command is similar to :hg:`serve`. It exists partially as a
3259 3272 workaround to the fact that ``hg serve --stdio`` must have specific
3260 3273 arguments for security reasons.
3261 3274 """
3262 3275 opts = pycompat.byteskwargs(opts)
3263 3276
3264 3277 if not opts[b'sshstdio']:
3265 3278 raise error.Abort(_(b'only --sshstdio is currently supported'))
3266 3279
3267 3280 logfh = None
3268 3281
3269 3282 if opts[b'logiofd'] and opts[b'logiofile']:
3270 3283 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3271 3284
3272 3285 if opts[b'logiofd']:
3273 3286 # Ideally we would be line buffered. But line buffering in binary
3274 3287 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3275 3288 # buffering could have performance impacts. But since this isn't
3276 3289 # performance critical code, it should be fine.
3277 3290 try:
3278 3291 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3279 3292 except OSError as e:
3280 3293 if e.errno != errno.ESPIPE:
3281 3294 raise
3282 3295 # can't seek a pipe, so `ab` mode fails on py3
3283 3296 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3284 3297 elif opts[b'logiofile']:
3285 3298 logfh = open(opts[b'logiofile'], b'ab', 0)
3286 3299
3287 3300 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3288 3301 s.serve_forever()
3289 3302
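# Usage sketch (log file name is illustrative): only the SSH stdio mode is
# currently supported, so an invocation looks like
#
#     $ hg debugserve --sshstdio --logiofile /tmp/hg-io.log
#
# --logiofd and --logiofile are mutually exclusive, as enforced above.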
3290 3303
3291 3304 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3292 3305 def debugsetparents(ui, repo, rev1, rev2=None):
3293 3306 """manually set the parents of the current working directory
3294 3307
3295 3308 This is useful for writing repository conversion tools, but should
3296 3309 be used with care. For example, neither the working directory nor the
3297 3310 dirstate is updated, so file status may be incorrect after running this
3298 3311 command.
3299 3312
3300 3313 Returns 0 on success.
3301 3314 """
3302 3315
3303 3316 node1 = scmutil.revsingle(repo, rev1).node()
3304 3317 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3305 3318
3306 3319 with repo.wlock():
3307 3320 repo.setparents(node1, node2)
3308 3321
3309 3322
3310 3323 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3311 3324 def debugsidedata(ui, repo, file_, rev=None, **opts):
3312 3325 """dump the side data for a cl/manifest/file revision
3313 3326
3314 3327 Use --verbose to dump the sidedata content."""
3315 3328 opts = pycompat.byteskwargs(opts)
3316 3329 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3317 3330 if rev is not None:
3318 3331 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3319 3332 file_, rev = None, file_
3320 3333 elif rev is None:
3321 3334 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3322 3335 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3323 3336 r = getattr(r, '_revlog', r)
3324 3337 try:
3325 3338 sidedata = r.sidedata(r.lookup(rev))
3326 3339 except KeyError:
3327 3340 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3328 3341 if sidedata:
3329 3342 sidedata = list(sidedata.items())
3330 3343 sidedata.sort()
3331 3344 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3332 3345 for key, value in sidedata:
3333 3346 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3334 3347 if ui.verbose:
3335 3348 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3336 3349
3337 3350
3338 3351 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3339 3352 def debugssl(ui, repo, source=None, **opts):
3340 3353 '''test a secure connection to a server
3341 3354
3342 3355 This builds the certificate chain for the server on Windows, installing the
3343 3356 missing intermediates and trusted root via Windows Update if necessary. It
3344 3357 does nothing on other platforms.
3345 3358
3346 3359 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3347 3360 that server is used. See :hg:`help urls` for more information.
3348 3361
3349 3362 If the update succeeds, retry the original operation. Otherwise, the cause
3350 3363 of the SSL error is likely another issue.
3351 3364 '''
3352 3365 if not pycompat.iswindows:
3353 3366 raise error.Abort(
3354 3367 _(b'certificate chain building is only possible on Windows')
3355 3368 )
3356 3369
3357 3370 if not source:
3358 3371 if not repo:
3359 3372 raise error.Abort(
3360 3373 _(
3361 3374 b"there is no Mercurial repository here, and no "
3362 3375 b"server specified"
3363 3376 )
3364 3377 )
3365 3378 source = b"default"
3366 3379
3367 3380 source, branches = hg.parseurl(ui.expandpath(source))
3368 3381 url = util.url(source)
3369 3382
3370 3383 defaultport = {b'https': 443, b'ssh': 22}
3371 3384 if url.scheme in defaultport:
3372 3385 try:
3373 3386 addr = (url.host, int(url.port or defaultport[url.scheme]))
3374 3387 except ValueError:
3375 3388 raise error.Abort(_(b"malformed port number in URL"))
3376 3389 else:
3377 3390 raise error.Abort(_(b"only https and ssh connections are supported"))
3378 3391
3379 3392 from . import win32
3380 3393
3381 3394 s = ssl.wrap_socket(
3382 3395 socket.socket(),
3383 3396 ssl_version=ssl.PROTOCOL_TLS,
3384 3397 cert_reqs=ssl.CERT_NONE,
3385 3398 ca_certs=None,
3386 3399 )
3387 3400
3388 3401 try:
3389 3402 s.connect(addr)
3390 3403 cert = s.getpeercert(True)
3391 3404
3392 3405 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3393 3406
3394 3407 complete = win32.checkcertificatechain(cert, build=False)
3395 3408
3396 3409 if not complete:
3397 3410 ui.status(_(b'certificate chain is incomplete, updating... '))
3398 3411
3399 3412 if not win32.checkcertificatechain(cert):
3400 3413 ui.status(_(b'failed.\n'))
3401 3414 else:
3402 3415 ui.status(_(b'done.\n'))
3403 3416 else:
3404 3417 ui.status(_(b'full certificate chain is available\n'))
3405 3418 finally:
3406 3419 s.close()
3407 3420
3408 3421
3409 3422 @command(
3410 3423 b'debugsub',
3411 3424 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3412 3425 _(b'[-r REV] [REV]'),
3413 3426 )
3414 3427 def debugsub(ui, repo, rev=None):
3415 3428 ctx = scmutil.revsingle(repo, rev, None)
3416 3429 for k, v in sorted(ctx.substate.items()):
3417 3430 ui.writenoi18n(b'path %s\n' % k)
3418 3431 ui.writenoi18n(b' source %s\n' % v[0])
3419 3432 ui.writenoi18n(b' revision %s\n' % v[1])
3420 3433
3421 3434
3422 3435 @command(
3423 3436 b'debugsuccessorssets',
3424 3437 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3425 3438 _(b'[REV]'),
3426 3439 )
3427 3440 def debugsuccessorssets(ui, repo, *revs, **opts):
3428 3441 """show set of successors for revision
3429 3442
3430 3443 A successors set of changeset A is a consistent group of revisions that
3431 3444 succeed A. It contains only non-obsolete changesets unless the closest
3432 3445 successors sets are requested (``--closest``).
3433 3446
3434 3447 In most cases a changeset A has a single successors set containing a single
3435 3448 successor (changeset A replaced by A').
3436 3449
3437 3450 A changeset that is made obsolete with no successors is called "pruned".
3438 3451 Such changesets have no successors sets at all.
3439 3452
3440 3453 A changeset that has been "split" will have a successors set containing
3441 3454 more than one successor.
3442 3455
3443 3456 A changeset that has been rewritten in multiple different ways is called
3444 3457 "divergent". Such changesets have multiple successor sets (each of which
3445 3458 may also be split, i.e. have multiple successors).
3446 3459
3447 3460 Results are displayed as follows::
3448 3461
3449 3462 <rev1>
3450 3463 <successors-1A>
3451 3464 <rev2>
3452 3465 <successors-2A>
3453 3466 <successors-2B1> <successors-2B2> <successors-2B3>
3454 3467
3455 3468 Here rev2 has two possible (i.e. divergent) successors sets. The first
3456 3469 holds one element, whereas the second holds three (i.e. the changeset has
3457 3470 been split).
3458 3471 """
3459 3472 # passed to successorssets caching computation from one call to another
3460 3473 cache = {}
3461 3474 ctx2str = bytes
3462 3475 node2str = short
3463 3476 for rev in scmutil.revrange(repo, revs):
3464 3477 ctx = repo[rev]
3465 3478 ui.write(b'%s\n' % ctx2str(ctx))
3466 3479 for succsset in obsutil.successorssets(
3467 3480 repo, ctx.node(), closest=opts['closest'], cache=cache
3468 3481 ):
3469 3482 if succsset:
3470 3483 ui.write(b' ')
3471 3484 ui.write(node2str(succsset[0]))
3472 3485 for node in succsset[1:]:
3473 3486 ui.write(b' ')
3474 3487 ui.write(node2str(node))
3475 3488 ui.write(b'\n')
3476 3489
3477 3490
3478 3491 @command(b'debugtagscache', [])
3479 3492 def debugtagscache(ui, repo):
3480 3493 """display the contents of .hg/cache/hgtagsfnodes1"""
3481 3494 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3482 3495 for r in repo:
3483 3496 node = repo[r].node()
3484 3497 tagsnode = cache.getfnode(node, computemissing=False)
3485 3498 tagsnodedisplay = hex(tagsnode) if tagsnode else b'missing/invalid'
3486 3499 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3487 3500
3488 3501
3489 3502 @command(
3490 3503 b'debugtemplate',
3491 3504 [
3492 3505 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3493 3506 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3494 3507 ],
3495 3508 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3496 3509 optionalrepo=True,
3497 3510 )
3498 3511 def debugtemplate(ui, repo, tmpl, **opts):
3499 3512 """parse and apply a template
3500 3513
3501 3514 If -r/--rev is given, the template is processed as a log template and
3502 3515 applied to the given changesets. Otherwise, it is processed as a generic
3503 3516 template.
3504 3517
3505 3518 Use --verbose to print the parsed tree.
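
    A small example, rendering an illustrative log template for the working
    directory parent::

        hg debugtemplate -r . '{rev}:{node|short} {desc|firstline}\n'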
3506 3519 """
3507 3520 revs = None
3508 3521 if opts['rev']:
3509 3522 if repo is None:
3510 3523 raise error.RepoError(
3511 3524 _(b'there is no Mercurial repository here (.hg not found)')
3512 3525 )
3513 3526 revs = scmutil.revrange(repo, opts['rev'])
3514 3527
3515 3528 props = {}
3516 3529 for d in opts['define']:
3517 3530 try:
3518 3531 k, v = (e.strip() for e in d.split(b'=', 1))
3519 3532 if not k or k == b'ui':
3520 3533 raise ValueError
3521 3534 props[k] = v
3522 3535 except ValueError:
3523 3536 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3524 3537
3525 3538 if ui.verbose:
3526 3539 aliases = ui.configitems(b'templatealias')
3527 3540 tree = templater.parse(tmpl)
3528 3541 ui.note(templater.prettyformat(tree), b'\n')
3529 3542 newtree = templater.expandaliases(tree, aliases)
3530 3543 if newtree != tree:
3531 3544 ui.notenoi18n(
3532 3545 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3533 3546 )
3534 3547
3535 3548 if revs is None:
3536 3549 tres = formatter.templateresources(ui, repo)
3537 3550 t = formatter.maketemplater(ui, tmpl, resources=tres)
3538 3551 if ui.verbose:
3539 3552 kwds, funcs = t.symbolsuseddefault()
3540 3553 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3541 3554 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3542 3555 ui.write(t.renderdefault(props))
3543 3556 else:
3544 3557 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3545 3558 if ui.verbose:
3546 3559 kwds, funcs = displayer.t.symbolsuseddefault()
3547 3560 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3548 3561 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3549 3562 for r in revs:
3550 3563 displayer.show(repo[r], **pycompat.strkwargs(props))
3551 3564 displayer.close()
3552 3565
3553 3566
3554 3567 @command(
3555 3568 b'debuguigetpass',
3556 3569 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3557 3570 _(b'[-p TEXT]'),
3558 3571 norepo=True,
3559 3572 )
3560 3573 def debuguigetpass(ui, prompt=b''):
3561 3574 """show prompt to type password"""
3562 3575 r = ui.getpass(prompt)
3563 3576 ui.writenoi18n(b'response: %s\n' % r)
3564 3577
3565 3578
3566 3579 @command(
3567 3580 b'debuguiprompt',
3568 3581 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3569 3582 _(b'[-p TEXT]'),
3570 3583 norepo=True,
3571 3584 )
3572 3585 def debuguiprompt(ui, prompt=b''):
3573 3586 """show plain prompt"""
3574 3587 r = ui.prompt(prompt)
3575 3588 ui.writenoi18n(b'response: %s\n' % r)
3576 3589
3577 3590
3578 3591 @command(b'debugupdatecaches', [])
3579 3592 def debugupdatecaches(ui, repo, *pats, **opts):
3580 3593 """warm all known caches in the repository"""
3581 3594 with repo.wlock(), repo.lock():
3582 3595 repo.updatecaches(full=True)
3583 3596
3584 3597
3585 3598 @command(
3586 3599 b'debugupgraderepo',
3587 3600 [
3588 3601 (
3589 3602 b'o',
3590 3603 b'optimize',
3591 3604 [],
3592 3605 _(b'extra optimization to perform'),
3593 3606 _(b'NAME'),
3594 3607 ),
3595 3608 (b'', b'run', False, _(b'performs an upgrade')),
3596 3609 (b'', b'backup', True, _(b'keep the old repository content around')),
3597 3610 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3598 3611 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3599 3612 ],
3600 3613 )
3601 3614 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3602 3615 """upgrade a repository to use different features
3603 3616
3604 3617 If no arguments are specified, the repository is evaluated for upgrade
3605 3618 and a list of problems and potential optimizations is printed.
3606 3619
3607 3620 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3608 3621 can be influenced via additional arguments. More details will be provided
3609 3622 by the command output when run without ``--run``.
3610 3623
3611 3624 During the upgrade, the repository will be locked and no writes will be
3612 3625 allowed.
3613 3626
3614 3627 At the end of the upgrade, the repository may not be readable while new
3615 3628 repository data is swapped in. This window will be as long as it takes to
3616 3629 rename some directories inside the ``.hg`` directory. On most machines, this
3617 3630 should complete almost instantaneously and the chances of a consumer being
3618 3631 unable to access the repository should be low.
3619 3632
3620 3633 By default, all revlogs will be upgraded. You can restrict this using flags
3621 3634 such as `--manifest` (see the example after this list):
3622 3635
3623 3636 * `--manifest`: only optimize the manifest
3624 3637 * `--no-manifest`: optimize all revlogs but the manifest
3625 3638 * `--changelog`: optimize the changelog only
3626 3639 * `--no-changelog --no-manifest`: optimize filelogs only
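
    For instance, a hypothetical session might first evaluate the repository,
    then run one of the optimizations reported by the dry run (here assumed to
    be ``re-delta-parent``)::

        hg debugupgraderepo
        hg debugupgraderepo --optimize re-delta-parent --run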
3627 3640 """
3628 3641 return upgrade.upgraderepo(
3629 3642 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3630 3643 )
3631 3644
3632 3645
3633 3646 @command(
3634 3647 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3635 3648 )
3636 3649 def debugwalk(ui, repo, *pats, **opts):
3637 3650 """show how files match on given patterns"""
3638 3651 opts = pycompat.byteskwargs(opts)
3639 3652 m = scmutil.match(repo[None], pats, opts)
3640 3653 if ui.verbose:
3641 3654 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3642 3655 items = list(repo[None].walk(m))
3643 3656 if not items:
3644 3657 return
3645 3658 f = lambda fn: fn
3646 3659 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3647 3660 f = lambda fn: util.normpath(fn)
3648 3661 fmt = b'f %%-%ds %%-%ds %%s' % (
3649 3662 max([len(abs) for abs in items]),
3650 3663 max([len(repo.pathto(abs)) for abs in items]),
3651 3664 )
3652 3665 for abs in items:
3653 3666 line = fmt % (
3654 3667 abs,
3655 3668 f(repo.pathto(abs)),
3656 3669 m.exact(abs) and b'exact' or b'',
3657 3670 )
3658 3671 ui.write(b"%s\n" % line.rstrip())
3659 3672
3660 3673
3661 3674 @command(b'debugwhyunstable', [], _(b'REV'))
3662 3675 def debugwhyunstable(ui, repo, rev):
3663 3676 """explain instabilities of a changeset"""
3664 3677 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3665 3678 dnodes = b''
3666 3679 if entry.get(b'divergentnodes'):
3667 3680 dnodes = (
3668 3681 b' '.join(
3669 3682 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3670 3683 for ctx in entry[b'divergentnodes']
3671 3684 )
3672 3685 + b' '
3673 3686 )
3674 3687 ui.write(
3675 3688 b'%s: %s%s %s\n'
3676 3689 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3677 3690 )
3678 3691
3679 3692
3680 3693 @command(
3681 3694 b'debugwireargs',
3682 3695 [
3683 3696 (b'', b'three', b'', b'three'),
3684 3697 (b'', b'four', b'', b'four'),
3685 3698 (b'', b'five', b'', b'five'),
3686 3699 ]
3687 3700 + cmdutil.remoteopts,
3688 3701 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3689 3702 norepo=True,
3690 3703 )
3691 3704 def debugwireargs(ui, repopath, *vals, **opts):
3692 3705 opts = pycompat.byteskwargs(opts)
3693 3706 repo = hg.peer(ui, opts, repopath)
3694 3707 for opt in cmdutil.remoteopts:
3695 3708 del opts[opt[1]]
3696 3709 args = {}
3697 3710 for k, v in pycompat.iteritems(opts):
3698 3711 if v:
3699 3712 args[k] = v
3700 3713 args = pycompat.strkwargs(args)
3701 3714 # run twice to check that we don't mess up the stream for the next command
3702 3715 res1 = repo.debugwireargs(*vals, **args)
3703 3716 res2 = repo.debugwireargs(*vals, **args)
3704 3717 ui.write(b"%s\n" % res1)
3705 3718 if res1 != res2:
3706 3719 ui.warn(b"%s\n" % res2)
3707 3720
3708 3721
3709 3722 def _parsewirelangblocks(fh):
3710 3723 activeaction = None
3711 3724 blocklines = []
3712 3725 lastindent = 0
3713 3726
3714 3727 for line in fh:
3715 3728 line = line.rstrip()
3716 3729 if not line:
3717 3730 continue
3718 3731
3719 3732 if line.startswith(b'#'):
3720 3733 continue
3721 3734
3722 3735 if not line.startswith(b' '):
3723 3736 # New block. Flush previous one.
3724 3737 if activeaction:
3725 3738 yield activeaction, blocklines
3726 3739
3727 3740 activeaction = line
3728 3741 blocklines = []
3729 3742 lastindent = 0
3730 3743 continue
3731 3744
3732 3745 # Else we start with an indent.
3733 3746
3734 3747 if not activeaction:
3735 3748 raise error.Abort(_(b'indented line outside of block'))
3736 3749
3737 3750 indent = len(line) - len(line.lstrip())
3738 3751
3739 3752 # If this line is indented more than the last line, concatenate it.
3740 3753 if indent > lastindent and blocklines:
3741 3754 blocklines[-1] += line.lstrip()
3742 3755 else:
3743 3756 blocklines.append(line)
3744 3757 lastindent = indent
3745 3758
3746 3759 # Flush last block.
3747 3760 if activeaction:
3748 3761 yield activeaction, blocklines
3749 3762
3750 3763
3751 3764 @command(
3752 3765 b'debugwireproto',
3753 3766 [
3754 3767 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3755 3768 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3756 3769 (
3757 3770 b'',
3758 3771 b'noreadstderr',
3759 3772 False,
3760 3773 _(b'do not read from stderr of the remote'),
3761 3774 ),
3762 3775 (
3763 3776 b'',
3764 3777 b'nologhandshake',
3765 3778 False,
3766 3779 _(b'do not log I/O related to the peer handshake'),
3767 3780 ),
3768 3781 ]
3769 3782 + cmdutil.remoteopts,
3770 3783 _(b'[PATH]'),
3771 3784 optionalrepo=True,
3772 3785 )
3773 3786 def debugwireproto(ui, repo, path=None, **opts):
3774 3787 """send wire protocol commands to a server
3775 3788
3776 3789 This command can be used to issue wire protocol commands to remote
3777 3790 peers and to debug the raw data being exchanged.
3778 3791
3779 3792 ``--localssh`` will start an SSH server against the current repository
3780 3793 and connect to that. By default, the connection will perform a handshake
3781 3794 and establish an appropriate peer instance.
3782 3795
3783 3796 ``--peer`` can be used to bypass the handshake protocol and construct a
3784 3797 peer instance using the specified class type. Valid values are ``raw``,
3785 3798 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3786 3799 raw data payloads and don't support higher-level command actions.
3787 3800
3788 3801 ``--noreadstderr`` can be used to disable automatic reading from stderr
3789 3802 of the peer (for SSH connections only). Disabling automatic reading of
3790 3803 stderr is useful for making output more deterministic.
3791 3804
3792 3805 Commands are issued via a mini language which is specified via stdin.
3793 3806 The language consists of individual actions to perform. An action is
3794 3807 defined by a block. A block is defined as a line with no leading
3795 3808 space followed by 0 or more lines with leading space. Blocks are
3796 3809 effectively a high-level command with additional metadata.
3797 3810
3798 3811 Lines beginning with ``#`` are ignored.
3799 3812
3800 3813 The following sections denote available actions.
3801 3814
3802 3815 raw
3803 3816 ---
3804 3817
3805 3818 Send raw data to the server.
3806 3819
3807 3820 The block payload contains the raw data to send as one atomic send
3808 3821 operation. The data may not actually be delivered in a single system
3809 3822 call: it depends on the abilities of the transport being used.
3810 3823
3811 3824 Each line in the block is de-indented and concatenated. Then, that
3812 3825 value is evaluated as a Python b'' literal. This allows the use of
3813 3826 backslash escaping, etc.
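
    For example, a hypothetical block sending a single line of text::

        raw
            hello\n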
3814 3827
3815 3828 raw+
3816 3829 ----
3817 3830
3818 3831 Behaves like ``raw`` except flushes output afterwards.
3819 3832
3820 3833 command <X>
3821 3834 -----------
3822 3835
3823 3836 Send a request to run a named command, whose name follows the ``command``
3824 3837 string.
3825 3838
3826 3839 Arguments to the command are defined as lines in this block. The format of
3827 3840 each line is ``<key> <value>``. e.g.::
3828 3841
3829 3842 command listkeys
3830 3843 namespace bookmarks
3831 3844
3832 3845 If the value begins with ``eval:``, it will be interpreted as a Python
3833 3846 literal expression. Otherwise values are interpreted as Python b'' literals.
3834 3847 This allows sending complex types and encoding special byte sequences via
3835 3848 backslash escaping.
3836 3849
3837 3850 The following arguments have special meaning:
3838 3851
3839 3852 ``PUSHFILE``
3840 3853 When defined, the *push* mechanism of the peer will be used instead
3841 3854 of the static request-response mechanism and the content of the
3842 3855 file specified in the value of this argument will be sent as the
3843 3856 command payload.
3844 3857
3845 3858 This can be used to submit a local bundle file to the remote.
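
    A hypothetical push of a local bundle file (the path is illustrative)::

        command unbundle
            PUSHFILE ../initial.v1.hg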
3846 3859
3847 3860 batchbegin
3848 3861 ----------
3849 3862
3850 3863 Instruct the peer to begin a batched send.
3851 3864
3852 3865 All ``command`` blocks are queued for execution until the next
3853 3866 ``batchsubmit`` block.
3854 3867
3855 3868 batchsubmit
3856 3869 -----------
3857 3870
3858 3871 Submit previously queued ``command`` blocks as a batch request.
3859 3872
3860 3873 This action MUST be paired with a ``batchbegin`` action.
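
    A hypothetical batch, queuing two commands and submitting them together::

        batchbegin
        command heads
        command listkeys
            namespace bookmarks
        batchsubmit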
3861 3874
3862 3875 httprequest <method> <path>
3863 3876 ---------------------------
3864 3877
3865 3878 (HTTP peer only)
3866 3879
3867 3880 Send an HTTP request to the peer.
3868 3881
3869 3882 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3870 3883
3871 3884 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3872 3885 headers to add to the request. e.g. ``Accept: foo``.
3873 3886
3874 3887 The following arguments are special:
3875 3888
3876 3889 ``BODYFILE``
3877 3890 The content of the file defined as the value to this argument will be
3878 3891 transferred verbatim as the HTTP request body.
3879 3892
3880 3893 ``frame <type> <flags> <payload>``
3881 3894 Send a unified protocol frame as part of the request body.
3882 3895
3883 3896 All frames will be collected and sent as the body to the HTTP
3884 3897 request.
3885 3898
3886 3899 close
3887 3900 -----
3888 3901
3889 3902 Close the connection to the server.
3890 3903
3891 3904 flush
3892 3905 -----
3893 3906
3894 3907 Flush data written to the server.
3895 3908
3896 3909 readavailable
3897 3910 -------------
3898 3911
3899 3912 Close the write end of the connection and read all available data from
3900 3913 the server.
3901 3914
3902 3915 If the connection to the server encompasses multiple pipes, we poll both
3903 3916 pipes and read available data.
3904 3917
3905 3918 readline
3906 3919 --------
3907 3920
3908 3921 Read a line of output from the server. If there are multiple output
3909 3922 pipes, reads only the main pipe.
3910 3923
3911 3924 ereadline
3912 3925 ---------
3913 3926
3914 3927 Like ``readline``, but read from the stderr pipe, if available.
3915 3928
3916 3929 read <X>
3917 3930 --------
3918 3931
3919 3932 ``read()`` N bytes from the server's main output pipe.
3920 3933
3921 3934 eread <X>
3922 3935 ---------
3923 3936
3924 3937 ``read()`` N bytes from the server's stderr pipe, if available.
3925 3938
3926 3939 Specifying Unified Frame-Based Protocol Frames
3927 3940 ----------------------------------------------
3928 3941
3929 3942 It is possible to emit *Unified Frame-Based Protocol* frames by using special
3930 3943 syntax.
3931 3944
3932 3945 A frame is composed of a type, flags, and a payload. These can be parsed
3933 3946 from a string of the form:
3934 3947
3935 3948 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3936 3949
3937 3950 ``request-id`` and ``stream-id`` are integers defining the request and
3938 3951 stream identifiers.
3939 3952
3940 3953 ``type`` can be an integer value for the frame type or the string name
3941 3954 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3942 3955 ``command-name``.
3943 3956
3944 3957 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3945 3958 components. Each component (and there can be just one) can be an integer
3946 3959 or a flag name for stream flags or frame flags, respectively. Values are
3947 3960 resolved to integers and then bitwise OR'd together.
3948 3961
3949 3962 ``payload`` represents the raw frame payload. If it begins with
3950 3963 ``cbor:``, the following string is evaluated as Python code and the
3951 3964 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3952 3965 as a Python byte string literal.
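
    For example, a hypothetical ``command-request`` frame carrying a CBOR
    payload could be written as::

        1 1 stream-begin command-request new cbor:{b'name': b'heads'}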
3953 3966 """
3954 3967 opts = pycompat.byteskwargs(opts)
3955 3968
3956 3969 if opts[b'localssh'] and not repo:
3957 3970 raise error.Abort(_(b'--localssh requires a repository'))
3958 3971
3959 3972 if opts[b'peer'] and opts[b'peer'] not in (
3960 3973 b'raw',
3961 3974 b'http2',
3962 3975 b'ssh1',
3963 3976 b'ssh2',
3964 3977 ):
3965 3978 raise error.Abort(
3966 3979 _(b'invalid value for --peer'),
3967 3980 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
3968 3981 )
3969 3982
3970 3983 if path and opts[b'localssh']:
3971 3984 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
3972 3985
3973 3986 if ui.interactive():
3974 3987 ui.write(_(b'(waiting for commands on stdin)\n'))
3975 3988
3976 3989 blocks = list(_parsewirelangblocks(ui.fin))
3977 3990
3978 3991 proc = None
3979 3992 stdin = None
3980 3993 stdout = None
3981 3994 stderr = None
3982 3995 opener = None
3983 3996
3984 3997 if opts[b'localssh']:
3985 3998 # We start the SSH server in its own process so there is process
3986 3999 # separation. This prevents a whole class of potential bugs around
3987 4000 # shared state from interfering with server operation.
3988 4001 args = procutil.hgcmd() + [
3989 4002 b'-R',
3990 4003 repo.root,
3991 4004 b'debugserve',
3992 4005 b'--sshstdio',
3993 4006 ]
3994 4007 proc = subprocess.Popen(
3995 4008 pycompat.rapply(procutil.tonativestr, args),
3996 4009 stdin=subprocess.PIPE,
3997 4010 stdout=subprocess.PIPE,
3998 4011 stderr=subprocess.PIPE,
3999 4012 bufsize=0,
4000 4013 )
4001 4014
4002 4015 stdin = proc.stdin
4003 4016 stdout = proc.stdout
4004 4017 stderr = proc.stderr
4005 4018
4006 4019 # We turn the pipes into observers so we can log I/O.
4007 4020 if ui.verbose or opts[b'peer'] == b'raw':
4008 4021 stdin = util.makeloggingfileobject(
4009 4022 ui, proc.stdin, b'i', logdata=True
4010 4023 )
4011 4024 stdout = util.makeloggingfileobject(
4012 4025 ui, proc.stdout, b'o', logdata=True
4013 4026 )
4014 4027 stderr = util.makeloggingfileobject(
4015 4028 ui, proc.stderr, b'e', logdata=True
4016 4029 )
4017 4030
4018 4031 # --localssh also implies the peer connection settings.
4019 4032
4020 4033 url = b'ssh://localserver'
4021 4034 autoreadstderr = not opts[b'noreadstderr']
4022 4035
4023 4036 if opts[b'peer'] == b'ssh1':
4024 4037 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4025 4038 peer = sshpeer.sshv1peer(
4026 4039 ui,
4027 4040 url,
4028 4041 proc,
4029 4042 stdin,
4030 4043 stdout,
4031 4044 stderr,
4032 4045 None,
4033 4046 autoreadstderr=autoreadstderr,
4034 4047 )
4035 4048 elif opts[b'peer'] == b'ssh2':
4036 4049 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4037 4050 peer = sshpeer.sshv2peer(
4038 4051 ui,
4039 4052 url,
4040 4053 proc,
4041 4054 stdin,
4042 4055 stdout,
4043 4056 stderr,
4044 4057 None,
4045 4058 autoreadstderr=autoreadstderr,
4046 4059 )
4047 4060 elif opts[b'peer'] == b'raw':
4048 4061 ui.write(_(b'using raw connection to peer\n'))
4049 4062 peer = None
4050 4063 else:
4051 4064 ui.write(_(b'creating ssh peer from handshake results\n'))
4052 4065 peer = sshpeer.makepeer(
4053 4066 ui,
4054 4067 url,
4055 4068 proc,
4056 4069 stdin,
4057 4070 stdout,
4058 4071 stderr,
4059 4072 autoreadstderr=autoreadstderr,
4060 4073 )
4061 4074
4062 4075 elif path:
4063 4076 # We bypass hg.peer() so we can proxy the sockets.
4064 4077 # TODO consider not doing this because we skip
4065 4078 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4066 4079 u = util.url(path)
4067 4080 if u.scheme != b'http':
4068 4081 raise error.Abort(_(b'only http:// paths are currently supported'))
4069 4082
4070 4083 url, authinfo = u.authinfo()
4071 4084 openerargs = {
4072 4085 'useragent': b'Mercurial debugwireproto',
4073 4086 }
4074 4087
4075 4088 # Turn pipes/sockets into observers so we can log I/O.
4076 4089 if ui.verbose:
4077 4090 openerargs.update(
4078 4091 {
4079 4092 'loggingfh': ui,
4080 4093 'loggingname': b's',
4081 4094 'loggingopts': {'logdata': True, 'logdataapis': False,},
4082 4095 }
4083 4096 )
4084 4097
4085 4098 if ui.debugflag:
4086 4099 openerargs['loggingopts']['logdataapis'] = True
4087 4100
4088 4101 # Don't send default headers when in raw mode. This allows us to
4089 4102 # bypass most of the behavior of our URL handling code so we can
4090 4103 # have near complete control over what's sent on the wire.
4091 4104 if opts[b'peer'] == b'raw':
4092 4105 openerargs['sendaccept'] = False
4093 4106
4094 4107 opener = urlmod.opener(ui, authinfo, **openerargs)
4095 4108
4096 4109 if opts[b'peer'] == b'http2':
4097 4110 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4098 4111 # We go through makepeer() because we need an API descriptor for
4099 4112 # the peer instance to be useful.
4100 4113 with ui.configoverride(
4101 4114 {(b'experimental', b'httppeer.advertise-v2'): True}
4102 4115 ):
4103 4116 if opts[b'nologhandshake']:
4104 4117 ui.pushbuffer()
4105 4118
4106 4119 peer = httppeer.makepeer(ui, path, opener=opener)
4107 4120
4108 4121 if opts[b'nologhandshake']:
4109 4122 ui.popbuffer()
4110 4123
4111 4124 if not isinstance(peer, httppeer.httpv2peer):
4112 4125 raise error.Abort(
4113 4126 _(
4114 4127 b'could not instantiate HTTP peer for '
4115 4128 b'wire protocol version 2'
4116 4129 ),
4117 4130 hint=_(
4118 4131 b'the server may not have the feature '
4119 4132 b'enabled or is not allowing this '
4120 4133 b'client version'
4121 4134 ),
4122 4135 )
4123 4136
4124 4137 elif opts[b'peer'] == b'raw':
4125 4138 ui.write(_(b'using raw connection to peer\n'))
4126 4139 peer = None
4127 4140 elif opts[b'peer']:
4128 4141 raise error.Abort(
4129 4142 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4130 4143 )
4131 4144 else:
4132 4145 peer = httppeer.makepeer(ui, path, opener=opener)
4133 4146
4134 4147 # We /could/ populate stdin/stdout with sock.makefile()...
4135 4148 else:
4136 4149 raise error.Abort(_(b'unsupported connection configuration'))
4137 4150
4138 4151 batchedcommands = None
4139 4152
4140 4153 # Now perform actions based on the parsed wire language instructions.
4141 4154 for action, lines in blocks:
4142 4155 if action in (b'raw', b'raw+'):
4143 4156 if not stdin:
4144 4157 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4145 4158
4146 4159 # Concatenate the data together.
4147 4160 data = b''.join(l.lstrip() for l in lines)
4148 4161 data = stringutil.unescapestr(data)
4149 4162 stdin.write(data)
4150 4163
4151 4164 if action == b'raw+':
4152 4165 stdin.flush()
4153 4166 elif action == b'flush':
4154 4167 if not stdin:
4155 4168 raise error.Abort(_(b'cannot call flush on this peer'))
4156 4169 stdin.flush()
4157 4170 elif action.startswith(b'command'):
4158 4171 if not peer:
4159 4172 raise error.Abort(
4160 4173 _(
4161 4174 b'cannot send commands unless peer instance '
4162 4175 b'is available'
4163 4176 )
4164 4177 )
4165 4178
4166 4179 command = action.split(b' ', 1)[1]
4167 4180
4168 4181 args = {}
4169 4182 for line in lines:
4170 4183 # We need to allow empty values.
4171 4184 fields = line.lstrip().split(b' ', 1)
4172 4185 if len(fields) == 1:
4173 4186 key = fields[0]
4174 4187 value = b''
4175 4188 else:
4176 4189 key, value = fields
4177 4190
4178 4191 if value.startswith(b'eval:'):
4179 4192 value = stringutil.evalpythonliteral(value[5:])
4180 4193 else:
4181 4194 value = stringutil.unescapestr(value)
4182 4195
4183 4196 args[key] = value
4184 4197
4185 4198 if batchedcommands is not None:
4186 4199 batchedcommands.append((command, args))
4187 4200 continue
4188 4201
4189 4202 ui.status(_(b'sending %s command\n') % command)
4190 4203
4191 4204 if b'PUSHFILE' in args:
4192 4205 with open(args[b'PUSHFILE'], 'rb') as fh:
4193 4206 del args[b'PUSHFILE']
4194 4207 res, output = peer._callpush(
4195 4208 command, fh, **pycompat.strkwargs(args)
4196 4209 )
4197 4210 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4198 4211 ui.status(
4199 4212 _(b'remote output: %s\n') % stringutil.escapestr(output)
4200 4213 )
4201 4214 else:
4202 4215 with peer.commandexecutor() as e:
4203 4216 res = e.callcommand(command, args).result()
4204 4217
4205 4218 if isinstance(res, wireprotov2peer.commandresponse):
4206 4219 val = res.objects()
4207 4220 ui.status(
4208 4221 _(b'response: %s\n')
4209 4222 % stringutil.pprint(val, bprefix=True, indent=2)
4210 4223 )
4211 4224 else:
4212 4225 ui.status(
4213 4226 _(b'response: %s\n')
4214 4227 % stringutil.pprint(res, bprefix=True, indent=2)
4215 4228 )
4216 4229
4217 4230 elif action == b'batchbegin':
4218 4231 if batchedcommands is not None:
4219 4232 raise error.Abort(_(b'nested batchbegin not allowed'))
4220 4233
4221 4234 batchedcommands = []
4222 4235 elif action == b'batchsubmit':
4223 4236 # There is a batching API we could go through. But it would be
4224 4237 # difficult to normalize requests into function calls. It is easier
4225 4238 # to bypass this layer and normalize to commands + args.
4226 4239 ui.status(
4227 4240 _(b'sending batch with %d sub-commands\n')
4228 4241 % len(batchedcommands)
4229 4242 )
4230 4243 assert peer is not None
4231 4244 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4232 4245 ui.status(
4233 4246 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4234 4247 )
4235 4248
4236 4249 batchedcommands = None
4237 4250
4238 4251 elif action.startswith(b'httprequest '):
4239 4252 if not opener:
4240 4253 raise error.Abort(
4241 4254 _(b'cannot use httprequest without an HTTP peer')
4242 4255 )
4243 4256
4244 4257 request = action.split(b' ', 2)
4245 4258 if len(request) != 3:
4246 4259 raise error.Abort(
4247 4260 _(
4248 4261 b'invalid httprequest: expected format is '
4249 4262 b'"httprequest <method> <path>'
4250 4263 )
4251 4264 )
4252 4265
4253 4266 method, httppath = request[1:]
4254 4267 headers = {}
4255 4268 body = None
4256 4269 frames = []
4257 4270 for line in lines:
4258 4271 line = line.lstrip()
4259 4272 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4260 4273 if m:
4261 4274 # Headers need to use native strings.
4262 4275 key = pycompat.strurl(m.group(1))
4263 4276 value = pycompat.strurl(m.group(2))
4264 4277 headers[key] = value
4265 4278 continue
4266 4279
4267 4280 if line.startswith(b'BODYFILE '):
4268 4281 with open(line.split(b' ', 1)[1], b'rb') as fh:
4269 4282 body = fh.read()
4270 4283 elif line.startswith(b'frame '):
4271 4284 frame = wireprotoframing.makeframefromhumanstring(
4272 4285 line[len(b'frame ') :]
4273 4286 )
4274 4287
4275 4288 frames.append(frame)
4276 4289 else:
4277 4290 raise error.Abort(
4278 4291 _(b'unknown argument to httprequest: %s') % line
4279 4292 )
4280 4293
4281 4294 url = path + httppath
4282 4295
4283 4296 if frames:
4284 4297 body = b''.join(bytes(f) for f in frames)
4285 4298
4286 4299 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4287 4300
4288 4301 # urllib.Request insists on using has_data() as a proxy for
4289 4302 # determining the request method. Override that to use our
4290 4303 # explicitly requested method.
4291 4304 req.get_method = lambda: pycompat.sysstr(method)
4292 4305
4293 4306 try:
4294 4307 res = opener.open(req)
4295 4308 body = res.read()
4296 4309 except util.urlerr.urlerror as e:
4297 4310 # read() method must be called, but only exists in Python 2
4298 4311 getattr(e, 'read', lambda: None)()
4299 4312 continue
4300 4313
4301 4314 ct = res.headers.get('Content-Type')
4302 4315 if ct == 'application/mercurial-cbor':
4303 4316 ui.write(
4304 4317 _(b'cbor> %s\n')
4305 4318 % stringutil.pprint(
4306 4319 cborutil.decodeall(body), bprefix=True, indent=2
4307 4320 )
4308 4321 )
4309 4322
4310 4323 elif action == b'close':
4311 4324 assert peer is not None
4312 4325 peer.close()
4313 4326 elif action == b'readavailable':
4314 4327 if not stdout or not stderr:
4315 4328 raise error.Abort(
4316 4329 _(b'readavailable not available on this peer')
4317 4330 )
4318 4331
4319 4332 stdin.close()
4320 4333 stdout.read()
4321 4334 stderr.read()
4322 4335
4323 4336 elif action == b'readline':
4324 4337 if not stdout:
4325 4338 raise error.Abort(_(b'readline not available on this peer'))
4326 4339 stdout.readline()
4327 4340 elif action == b'ereadline':
4328 4341 if not stderr:
4329 4342 raise error.Abort(_(b'ereadline not available on this peer'))
4330 4343 stderr.readline()
4331 4344 elif action.startswith(b'read '):
4332 4345 count = int(action.split(b' ', 1)[1])
4333 4346 if not stdout:
4334 4347 raise error.Abort(_(b'read not available on this peer'))
4335 4348 stdout.read(count)
4336 4349 elif action.startswith(b'eread '):
4337 4350 count = int(action.split(b' ', 1)[1])
4338 4351 if not stderr:
4339 4352 raise error.Abort(_(b'eread not available on this peer'))
4340 4353 stderr.read(count)
4341 4354 else:
4342 4355 raise error.Abort(_(b'unknown action: %s') % action)
4343 4356
4344 4357 if batchedcommands is not None:
4345 4358 raise error.Abort(_(b'unclosed "batchbegin" request'))
4346 4359
4347 4360 if peer:
4348 4361 peer.close()
4349 4362
4350 4363 if proc:
4351 4364 proc.kill()
@@ -1,429 +1,429 b''
1 1 Show all commands except debug commands
2 2 $ hg debugcomplete
3 3 abort
4 4 add
5 5 addremove
6 6 annotate
7 7 archive
8 8 backout
9 9 bisect
10 10 bookmarks
11 11 branch
12 12 branches
13 13 bundle
14 14 cat
15 15 clone
16 16 commit
17 17 config
18 18 continue
19 19 copy
20 20 diff
21 21 export
22 22 files
23 23 forget
24 24 graft
25 25 grep
26 26 heads
27 27 help
28 28 identify
29 29 import
30 30 incoming
31 31 init
32 32 locate
33 33 log
34 34 manifest
35 35 merge
36 36 outgoing
37 37 parents
38 38 paths
39 39 phase
40 40 pull
41 41 push
42 42 recover
43 43 remove
44 44 rename
45 45 resolve
46 46 revert
47 47 rollback
48 48 root
49 49 serve
50 50 shelve
51 51 status
52 52 summary
53 53 tag
54 54 tags
55 55 tip
56 56 unbundle
57 57 unshelve
58 58 update
59 59 verify
60 60 version
61 61
62 62 Show all commands that start with "a"
63 63 $ hg debugcomplete a
64 64 abort
65 65 add
66 66 addremove
67 67 annotate
68 68 archive
69 69
70 70 Do not show debug commands if there are other candidates
71 71 $ hg debugcomplete d
72 72 diff
73 73
74 74 Show debug commands if there are no other candidates
75 75 $ hg debugcomplete debug
76 76 debugancestor
77 77 debugapplystreamclonebundle
78 78 debugbuilddag
79 79 debugbundle
80 80 debugcapabilities
81 81 debugcheckstate
82 82 debugcolor
83 83 debugcommands
84 84 debugcomplete
85 85 debugconfig
86 86 debugcreatestreamclonebundle
87 87 debugdag
88 88 debugdata
89 89 debugdate
90 90 debugdeltachain
91 91 debugdirstate
92 92 debugdiscovery
93 93 debugdownload
94 94 debugextensions
95 95 debugfileset
96 96 debugformat
97 97 debugfsinfo
98 98 debuggetbundle
99 99 debugignore
100 100 debugindex
101 101 debugindexdot
102 102 debugindexstats
103 103 debuginstall
104 104 debugknown
105 105 debuglabelcomplete
106 106 debuglocks
107 107 debugmanifestfulltextcache
108 108 debugmergestate
109 109 debugnamecomplete
110 110 debugnodemap
111 111 debugobsolete
112 112 debugp1copies
113 113 debugp2copies
114 114 debugpathcomplete
115 115 debugpathcopies
116 116 debugpeer
117 117 debugpickmergetool
118 118 debugpushkey
119 119 debugpvec
120 120 debugrebuilddirstate
121 121 debugrebuildfncache
122 122 debugrename
123 123 debugrevlog
124 124 debugrevlogindex
125 125 debugrevspec
126 126 debugserve
127 127 debugsetparents
128 128 debugsidedata
129 129 debugssl
130 130 debugsub
131 131 debugsuccessorssets
132 132 debugtagscache
133 133 debugtemplate
134 134 debuguigetpass
135 135 debuguiprompt
136 136 debugupdatecaches
137 137 debugupgraderepo
138 138 debugwalk
139 139 debugwhyunstable
140 140 debugwireargs
141 141 debugwireproto
142 142
143 143 Do not show the alias of a debug command if there are other candidates
144 144 (this should hide rawcommit)
145 145 $ hg debugcomplete r
146 146 recover
147 147 remove
148 148 rename
149 149 resolve
150 150 revert
151 151 rollback
152 152 root
153 153 Show the alias of a debug command if there are no other candidates
154 154 $ hg debugcomplete rawc
155 155
156 156
157 157 Show the global options
158 158 $ hg debugcomplete --options | sort
159 159 --color
160 160 --config
161 161 --cwd
162 162 --debug
163 163 --debugger
164 164 --encoding
165 165 --encodingmode
166 166 --help
167 167 --hidden
168 168 --noninteractive
169 169 --pager
170 170 --profile
171 171 --quiet
172 172 --repository
173 173 --time
174 174 --traceback
175 175 --verbose
176 176 --version
177 177 -R
178 178 -h
179 179 -q
180 180 -v
181 181 -y
182 182
183 183 Show the options for the "serve" command
184 184 $ hg debugcomplete --options serve | sort
185 185 --accesslog
186 186 --address
187 187 --certificate
188 188 --cmdserver
189 189 --color
190 190 --config
191 191 --cwd
192 192 --daemon
193 193 --daemon-postexec
194 194 --debug
195 195 --debugger
196 196 --encoding
197 197 --encodingmode
198 198 --errorlog
199 199 --help
200 200 --hidden
201 201 --ipv6
202 202 --name
203 203 --noninteractive
204 204 --pager
205 205 --pid-file
206 206 --port
207 207 --prefix
208 208 --print-url
209 209 --profile
210 210 --quiet
211 211 --repository
212 212 --stdio
213 213 --style
214 214 --subrepos
215 215 --templates
216 216 --time
217 217 --traceback
218 218 --verbose
219 219 --version
220 220 --web-conf
221 221 -6
222 222 -A
223 223 -E
224 224 -R
225 225 -S
226 226 -a
227 227 -d
228 228 -h
229 229 -n
230 230 -p
231 231 -q
232 232 -t
233 233 -v
234 234 -y
235 235
236 236 Show an error if we use --options with an ambiguous abbreviation
237 237 $ hg debugcomplete --options s
238 238 hg: command 's' is ambiguous:
239 239 serve shelve showconfig status summary
240 240 [255]
241 241
242 242 Show all commands + options
243 243 $ hg debugcommands
244 244 abort: dry-run
245 245 add: include, exclude, subrepos, dry-run
246 246 addremove: similarity, subrepos, include, exclude, dry-run
247 247 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, skip, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, include, exclude, template
248 248 archive: no-decode, prefix, rev, type, subrepos, include, exclude
249 249 backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user
250 250 bisect: reset, good, bad, skip, extend, command, noupdate
251 251 bookmarks: force, rev, delete, rename, inactive, list, template
252 252 branch: force, clean, rev
253 253 branches: active, closed, rev, template
254 254 bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
255 255 cat: output, rev, decode, include, exclude, template
256 256 clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
257 257 commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
258 258 config: untrusted, edit, local, global, template
259 259 continue: dry-run
260 260 copy: after, force, include, exclude, dry-run
261 261 debugancestor:
262 262 debugapplystreamclonebundle:
263 263 debugbuilddag: mergeable-file, overwritten-file, new-file
264 264 debugbundle: all, part-type, spec
265 265 debugcapabilities:
266 266 debugcheckstate:
267 267 debugcolor: style
268 268 debugcommands:
269 269 debugcomplete: options
270 270 debugcreatestreamclonebundle:
271 271 debugdag: tags, branches, dots, spaces
272 272 debugdata: changelog, manifest, dir
273 273 debugdate: extended
274 274 debugdeltachain: changelog, manifest, dir, template
275 275 debugdirstate: nodates, dates, datesort
276 276 debugdiscovery: old, nonheads, rev, seed, ssh, remotecmd, insecure
277 277 debugdownload: output
278 278 debugextensions: template
279 279 debugfileset: rev, all-files, show-matcher, show-stage
280 280 debugformat: template
281 281 debugfsinfo:
282 282 debuggetbundle: head, common, type
283 283 debugignore:
284 284 debugindex: changelog, manifest, dir, template
285 285 debugindexdot: changelog, manifest, dir
286 286 debugindexstats:
287 287 debuginstall: template
288 288 debugknown:
289 289 debuglabelcomplete:
290 290 debuglocks: force-lock, force-wlock, set-lock, set-wlock
291 291 debugmanifestfulltextcache: clear, add
292 292 debugmergestate:
293 293 debugnamecomplete:
294 debugnodemap: dump-new, dump-disk, check
294 debugnodemap: dump-new, dump-disk, check, metadata
295 295 debugobsolete: flags, record-parents, rev, exclusive, index, delete, date, user, template
296 296 debugp1copies: rev
297 297 debugp2copies: rev
298 298 debugpathcomplete: full, normal, added, removed
299 299 debugpathcopies: include, exclude
300 300 debugpeer:
301 301 debugpickmergetool: rev, changedelete, include, exclude, tool
302 302 debugpushkey:
303 303 debugpvec:
304 304 debugrebuilddirstate: rev, minimal
305 305 debugrebuildfncache:
306 306 debugrename: rev
307 307 debugrevlog: changelog, manifest, dir, dump
308 308 debugrevlogindex: changelog, manifest, dir, format
309 309 debugrevspec: optimize, show-revs, show-set, show-stage, no-optimized, verify-optimized
310 310 debugserve: sshstdio, logiofd, logiofile
311 311 debugsetparents:
312 312 debugsidedata: changelog, manifest, dir
313 313 debugssl:
314 314 debugsub: rev
315 315 debugsuccessorssets: closest
316 316 debugtagscache:
317 317 debugtemplate: rev, define
318 318 debuguigetpass: prompt
319 319 debuguiprompt: prompt
320 320 debugupdatecaches:
321 321 debugupgraderepo: optimize, run, backup, changelog, manifest
322 322 debugwalk: include, exclude
323 323 debugwhyunstable:
324 324 debugwireargs: three, four, five, ssh, remotecmd, insecure
325 325 debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
326 326 diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
327 327 export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
328 328 files: rev, print0, include, exclude, template, subrepos
329 329 forget: interactive, include, exclude, dry-run
330 330 graft: rev, base, continue, stop, abort, edit, log, no-commit, force, currentdate, currentuser, date, user, tool, dry-run
331 331 grep: print0, all, diff, text, follow, ignore-case, files-with-matches, line-number, rev, all-files, user, date, template, include, exclude
332 332 heads: rev, topo, active, closed, style, template
333 333 help: extension, command, keyword, system
334 334 identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure, template
335 335 import: strip, base, secret, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity
336 336 incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
337 337 init: ssh, remotecmd, insecure
338 338 locate: rev, print0, fullpath, include, exclude
339 339 log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
340 340 manifest: rev, all, template
341 341 merge: force, rev, preview, abort, tool
342 342 outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
343 343 parents: rev, style, template
344 344 paths: template
345 345 phase: public, draft, secret, force, rev
346 346 pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure
347 347 push: force, rev, bookmark, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
348 348 recover: verify
349 349 remove: after, force, subrepos, include, exclude, dry-run
350 350 rename: after, force, include, exclude, dry-run
351 351 resolve: all, list, mark, unmark, no-status, re-merge, tool, include, exclude, template
352 352 revert: all, date, rev, no-backup, interactive, include, exclude, dry-run
353 353 rollback: dry-run, force
354 354 root: template
355 355 serve: accesslog, daemon, daemon-postexec, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, cmdserver, templates, style, ipv6, certificate, print-url, subrepos
356 356 shelve: addremove, unknown, cleanup, date, delete, edit, keep, list, message, name, patch, interactive, stat, include, exclude
357 357 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, terse, copies, print0, rev, change, include, exclude, subrepos, template
358 358 summary: remote
359 359 tag: force, local, rev, remove, edit, message, date, user
360 360 tags: template
361 361 tip: patch, git, style, template
362 362 unbundle: update
363 363 unshelve: abort, continue, interactive, keep, name, tool, date
364 364 update: clean, check, merge, date, rev, tool
365 365 verify: full
366 366 version: template
367 367
368 368 $ hg init a
369 369 $ cd a
370 370 $ echo fee > fee
371 371 $ hg ci -q -Amfee
372 372 $ hg tag fee
373 373 $ mkdir fie
374 374 $ echo dead > fie/dead
375 375 $ echo live > fie/live
376 376 $ hg bookmark fo
377 377 $ hg branch -q fie
378 378 $ hg ci -q -Amfie
379 379 $ echo fo > fo
380 380 $ hg branch -qf default
381 381 $ hg ci -q -Amfo
382 382 $ echo Fum > Fum
383 383 $ hg ci -q -AmFum
384 384 $ hg bookmark Fum
385 385
386 386 Test debugpathcomplete
387 387
388 388 $ hg debugpathcomplete f
389 389 fee
390 390 fie
391 391 fo
392 392 $ hg debugpathcomplete -f f
393 393 fee
394 394 fie/dead
395 395 fie/live
396 396 fo
397 397
398 398 $ hg rm Fum
399 399 $ hg debugpathcomplete -r F
400 400 Fum
401 401
402 402 Test debugnamecomplete
403 403
404 404 $ hg debugnamecomplete
405 405 Fum
406 406 default
407 407 fee
408 408 fie
409 409 fo
410 410 tip
411 411 $ hg debugnamecomplete f
412 412 fee
413 413 fie
414 414 fo
415 415
416 416 Test debuglabelcomplete, a deprecated name for debugnamecomplete that is still
417 417 used for completions in some shells.
418 418
419 419 $ hg debuglabelcomplete
420 420 Fum
421 421 default
422 422 fee
423 423 fie
424 424 fo
425 425 tip
426 426 $ hg debuglabelcomplete f
427 427 fee
428 428 fie
429 429 fo
@@ -1,67 +1,71 b''
1 1 ===================================
2 2 Test the persistent on-disk nodemap
3 3 ===================================
4 4
5 5
6 6 $ hg init test-repo
7 7 $ cd test-repo
8 8 $ cat << EOF >> .hg/hgrc
9 9 > [experimental]
10 10 > exp-persistent-nodemap=yes
11 11 > [devel]
12 12 > persistent-nodemap=yes
13 13 > EOF
14 14 $ hg debugbuilddag .+5000
15 $ hg debugnodemap --metadata
16 uid: ???????????????? (glob)
15 17 $ f --size .hg/store/00changelog.n
16 18 .hg/store/00changelog.n: size=18
17 19 $ f --sha256 .hg/store/00changelog-*.nd
18 20 .hg/store/00changelog-????????????????.nd: sha256=b961925120e1c9bc345c199b2cc442abc477029fdece37ef9d99cbe59c0558b7 (glob)
19 21 $ hg debugnodemap --dump-new | f --sha256 --size
20 22 size=122880, sha256=b961925120e1c9bc345c199b2cc442abc477029fdece37ef9d99cbe59c0558b7
21 23 $ hg debugnodemap --dump-disk | f --sha256 --bytes=256 --hexdump --size
22 24 size=122880, sha256=b961925120e1c9bc345c199b2cc442abc477029fdece37ef9d99cbe59c0558b7
23 25 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
24 26 0010: ff ff ff ff ff ff ff ff ff ff fa c2 ff ff ff ff |................|
25 27 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
26 28 0030: ff ff ff ff ff ff ed b3 ff ff ff ff ff ff ff ff |................|
27 29 0040: ff ff ff ff ff ff ee 34 00 00 00 00 ff ff ff ff |.......4........|
28 30 0050: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
29 31 0060: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
30 32 0070: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
31 33 0080: ff ff ff ff ff ff f8 50 ff ff ff ff ff ff ff ff |.......P........|
32 34 0090: ff ff ff ff ff ff ff ff ff ff ec c7 ff ff ff ff |................|
33 35 00a0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
34 36 00b0: ff ff ff ff ff ff fa be ff ff f2 fc ff ff ff ff |................|
35 37 00c0: ff ff ff ff ff ff ef ea ff ff ff ff ff ff f9 17 |................|
36 38 00d0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
37 39 00e0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
38 40 00f0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
39 41 $ hg debugnodemap --check
40 42 revision in index: 5001
41 43 revision in nodemap: 5001
42 44
43 45 add a new commit
44 46
45 47 $ hg up
46 48 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
47 49 $ echo foo > foo
48 50 $ hg add foo
49 51 $ hg ci -m 'foo'
52 $ hg debugnodemap --metadata
53 uid: ???????????????? (glob)
50 54 $ f --size .hg/store/00changelog.n
51 55 .hg/store/00changelog.n: size=18
52 56
53 57 (The pure code uses the debug code that performs an incremental update; the C code re-encodes from scratch)
54 58
55 59 #if pure
56 60 $ f --sha256 .hg/store/00changelog-*.nd --size
57 61 .hg/store/00changelog-????????????????.nd: size=123072, sha256=136472751566c8198ff09e306a7d2f9bd18bd32298d614752b73da4d6df23340 (glob)
58 62
59 63 #else
60 64 $ f --sha256 .hg/store/00changelog-*.nd --size
61 65 .hg/store/00changelog-????????????????.nd: size=122880, sha256=bfafebd751c4f6d116a76a37a1dee2a251747affe7efbcc4f4842ccc746d4db9 (glob)
62 66
63 67 #endif
64 68
65 69 $ hg debugnodemap --check
66 70 revision in index: 5002
67 71 revision in nodemap: 5002