##// END OF EJS Templates
debugdeltachain: use the symbolic constant to access entry information...
marmoute -
r50114:0a86cb15 default
parent child Browse files
Show More
@@ -1,4932 +1,4937
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 vfs as vfsmod,
92 92 wireprotoframing,
93 93 wireprotoserver,
94 94 )
95 95 from .interfaces import repository
96 96 from .utils import (
97 97 cborutil,
98 98 compression,
99 99 dateutil,
100 100 procutil,
101 101 stringutil,
102 102 urlutil,
103 103 )
104 104
105 105 from .revlogutils import (
106 constants as revlog_constants,
106 107 deltas as deltautil,
107 108 nodemap,
108 109 rewrite,
109 110 sidedata,
110 111 )
111 112
112 113 release = lockmod.release
113 114
114 115 table = {}
115 116 table.update(strip.command._table)
116 117 command = registrar.command(table)
117 118
118 119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        # No index file: fall back to the changelog of the current repo.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(ancestor), hex(ancestor)))
138 139
139 140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # This is a base85-armored version of the EICAR test file. See
    # https://en.wikipedia.org/wiki/EICAR_test_file for details.
    payload = util.b85decode(
        b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
        b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
    )
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(payload)
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
155 156
156 157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
163 164
164 165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG: count 'n' (node) events so the progress
    # bar below can be given a total.
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually create a commit for every 'n' event, inside a
    # single transaction under both repo locks.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the last node committed (-1 = none yet)
        atbranch = b'default'
        nodeids = []  # nodeids[i] is the node committed for DAG id i
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way-merge the "mf" file of the
                        # two parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # Very first node: start from the synthetic content.
                        ml = initialmergedlines
                    # Tag this revision's own line so every rev changes "mf".
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # On merges, carry over the second parent's nf* files
                        # so they survive in the merged manifest.
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve DAG backrefs to the parent nodeids for memctx.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # Local tag event: remember it; written out after the loop.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # Branch annotation: applies to all subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
350 351
351 352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of a changegroup, one line per delta.

    With ``all`` set, every delta field is shown for each of the
    changelog, manifest and filelog sections; otherwise only the
    changelog node hashes are listed.
    """
    pad = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n" % pad
        )

        def showchunks(named):
            ui.write(b"\n%s%s\n" % (pad, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                fields = (
                    pad,
                    hex(node),
                    hex(p1),
                    hex(p2),
                    hex(cs),
                    hex(deltabase),
                    len(delta),
                )
                ui.write(b"%s%s %s %s %s %s %d\n" % fields)

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iter() with a sentinel: filelogheader() returns {} at the end.
        for chunkdata in iter(gen.filelogheader, {}):
            showchunks(chunkdata[b'filename'])
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node = deltadata[0]
            ui.write(b"%s%s\n" % (pad, hex(node)))
391 392
392 393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown format: report the raw version byte and the payload size.
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            marker = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, marker)
        fm.end()
415 416
416 417
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
425 426
426 427
def _quasirepr(thing):
    """Return a repr-like bytes rendering with deterministic key order.

    Mapping types are rendered with their keys sorted so output is
    stable; everything else falls back to ``repr``.
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
433 434
434 435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        # Optional filter: only display the requested part types.
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # The unbundler is constructed regardless of quiet mode.
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
457 458
458 459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: just print the bundlespec and stop.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
481 482
482 483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        # Always release the peer connection, even on error.
        peer.close()
502 503
503 504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    # Either recompute the file-change information from the context, or
    # decode what is stored in the changelog sidedata.
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        files = None
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)

    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
553 554
554 555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    error_count = 0
    # verify() yields (format, args...) tuples describing inconsistencies.
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        error_count += 1
    if error_count:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
568 569
569 570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # Dispatch to the style listing with --style, colors otherwise.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
582 583
583 584
def _debugdisplaycolor(ui):
    """List every available color name, each printed in its own color."""
    # Work on a copy so the real ui's style table is left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for name, value in ui.configitems(b'color'):
            if name.startswith(b'color.'):
                ui._styles[name] = name[6:]
            elif name.startswith(b'terminfo.'):
                ui._styles[name] = name[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    sortkey = lambda item: (b'_' in item[0], item[0], item[1])
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
600 601
601 602
def _debugdisplaystyle(ui):
    """List configured styles with their effects, aligned in columns."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad effect lists so they line up after the longest label.
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            rendered = (ui.label(e, e) for e in effects.split())
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
615 616
616 617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        msg = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(msg)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
638 639
639 640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone revlog index file: emit its DAG, labeling any
        # explicitly listed revisions as "rN".
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # 'n' events carry (rev, [parent revs]); -1 parents are dropped.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    # 'l' events attach a label to the preceding node.
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged rev to its tag names for 'l' label events.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an 'a' (annotation) event whenever the branch
                    # recorded in the changeset extra changes.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Serialize the event stream back into compact dagtext notation.
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
709 710
710 711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if any(opts.get(key) for key in (b'changelog', b'manifest', b'dir')):
        # -c/-m/--dir: the positional "file" argument is really the rev.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
726 727
727 728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # With a RANGE argument, also report whether the date matches it.
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
746 747
747 748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Use the symbolic ENTRY_* constants to access index-entry fields
        # rather than raw tuple offsets.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # Classify how this revision's delta was computed.
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, the base is either the rev itself
            # (full text) or the previous revision.
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # Chains are numbered in order of first appearance of their base.
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Slice the chain the way a sparse read would, and measure
            # the total and largest block sizes that would be read.
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
930 935
931 936
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 docket metadata instead of entries.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        # Unpack the fixed-layout tree metadata blob from the docket.
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) overrides --dates when explicitly given.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # -1 marks an entry whose mtime is not recorded.
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set in the stored mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1019 1024
1020 1025
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20 # 160 bits for SHA-1
        # the ignore-pattern hash is the trailing field of the tree metadata
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1035 1040
1036 1041
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to a real (or configured) peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: use the local repo, filtered down to the given
        # revisions, as the "remote" side of the discovery
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: likewise restrict the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy (pre-setdiscovery) protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # renamed from `any` to avoid shadowing the builtin
            common, _any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine readable output: capture everything written to the ui during
        # the exchange and expose it as a regular output field

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    # renamed from `all` to avoid shadowing the builtin
    all_revs = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all_revs)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all_revs)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1291 1296
1292 1297
1293 1298 _chunksize = 4 << 10
1294 1299
1295 1300
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The downloaded data is written through the ui, or to the file given
    with ``--output``.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # stream in fixed-size chunks so arbitrarily large resources never
        # have to be held in memory at once
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # close the response handle too; it was previously leaked
        fh.close()
        if output:
            dest.close()
1318 1323
1319 1324
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # one formatter item per loaded extension, sorted by extension name
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen builds have no module __file__; report the executable
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # default verbosity: annotate the name with its tested status
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # show the most recent version the extension declares
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1381 1386
1382 1387
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # pipeline of parse-tree transformations; each named stage can be dumped
    # with --show-stage NAME (or 'all')
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names to run the matcher against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include working directory files (known, unknown and ignored)
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1478 1483
1479 1484
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # building a report (--to-report) is exclusive with applying one
    # (--from-report) and with --dry-run
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the actual detection/repair logic lives in the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1552 1557
1553 1558
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: widest variant name, at least the header width
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad each name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output renders booleans as yes/no; byte strings pass
            # through unchanged (detected via a bytes-only attribute)
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # label (color) the row depending on whether the repository value
        # matches the configured value and the Mercurial default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1624 1629
1625 1630
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean filesystem probe the way this command always has
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probe case sensitivity with a throwaway file in the target path;
        # failure to create it leaves the answer as '(unknown)'
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1648 1653
1649 1654
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # translate the hex node arguments into the wire protocol call
    getbundle_args = {}
    if common:
        getbundle_args['common'] = [bin(hexnode) for hexnode in common]
    if head:
        getbundle_args['heads'] = [bin(hexnode) for hexnode in head]
    # TODO: get desired bundlecaps from command line.
    getbundle_args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **getbundle_args)

    # map user-facing compression names to on-disk bundle type identifiers
    known_types = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = known_types.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1696 1701
1697 1702
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # the file itself is not ignored: check whether one of
                    # its containing directories is
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1746 1751
1747 1752
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in debug mode, short ones otherwise
    shortfn = hex if ui.debugflag else short

    # measure the node-id column width from the first revision, if any
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1787 1792
1788 1793
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)
        # one edge per parent; the second parent is omitted when null
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1807 1812
1808 1813
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise a node lookup first so the index is fully populated before we
    # read its stats — NOTE(review): presumably warms the native nodetree;
    # confirm against the index implementation
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for statname, statvalue in sorted(stats.items()):
        ui.write(b'%s: %d\n' % (statname, statvalue))
1818 1823
1819 1824
1820 1825 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1821 1826 def debuginstall(ui, **opts):
1822 1827 """test Mercurial installation
1823 1828
1824 1829 Returns 0 on success.
1825 1830 """
1826 1831 opts = pycompat.byteskwargs(opts)
1827 1832
1828 1833 problems = 0
1829 1834
1830 1835 fm = ui.formatter(b'debuginstall', opts)
1831 1836 fm.startitem()
1832 1837
1833 1838 # encoding might be unknown or wrong. don't translate these messages.
1834 1839 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1835 1840 err = None
1836 1841 try:
1837 1842 codecs.lookup(pycompat.sysstr(encoding.encoding))
1838 1843 except LookupError as inst:
1839 1844 err = stringutil.forcebytestr(inst)
1840 1845 problems += 1
1841 1846 fm.condwrite(
1842 1847 err,
1843 1848 b'encodingerror',
1844 1849 b" %s\n (check that your locale is properly set)\n",
1845 1850 err,
1846 1851 )
1847 1852
1848 1853 # Python
1849 1854 pythonlib = None
1850 1855 if util.safehasattr(os, '__file__'):
1851 1856 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1852 1857 elif getattr(sys, 'oxidized', False):
1853 1858 pythonlib = pycompat.sysexecutable
1854 1859
1855 1860 fm.write(
1856 1861 b'pythonexe',
1857 1862 _(b"checking Python executable (%s)\n"),
1858 1863 pycompat.sysexecutable or _(b"unknown"),
1859 1864 )
1860 1865 fm.write(
1861 1866 b'pythonimplementation',
1862 1867 _(b"checking Python implementation (%s)\n"),
1863 1868 pycompat.sysbytes(platform.python_implementation()),
1864 1869 )
1865 1870 fm.write(
1866 1871 b'pythonver',
1867 1872 _(b"checking Python version (%s)\n"),
1868 1873 (b"%d.%d.%d" % sys.version_info[:3]),
1869 1874 )
1870 1875 fm.write(
1871 1876 b'pythonlib',
1872 1877 _(b"checking Python lib (%s)...\n"),
1873 1878 pythonlib or _(b"unknown"),
1874 1879 )
1875 1880
1876 1881 try:
1877 1882 from . import rustext # pytype: disable=import-error
1878 1883
1879 1884 rustext.__doc__ # trigger lazy import
1880 1885 except ImportError:
1881 1886 rustext = None
1882 1887
1883 1888 security = set(sslutil.supportedprotocols)
1884 1889 if sslutil.hassni:
1885 1890 security.add(b'sni')
1886 1891
1887 1892 fm.write(
1888 1893 b'pythonsecurity',
1889 1894 _(b"checking Python security support (%s)\n"),
1890 1895 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1891 1896 )
1892 1897
1893 1898 # These are warnings, not errors. So don't increment problem count. This
1894 1899 # may change in the future.
1895 1900 if b'tls1.2' not in security:
1896 1901 fm.plain(
1897 1902 _(
1898 1903 b' TLS 1.2 not supported by Python install; '
1899 1904 b'network connections lack modern security\n'
1900 1905 )
1901 1906 )
1902 1907 if b'sni' not in security:
1903 1908 fm.plain(
1904 1909 _(
1905 1910 b' SNI not supported by Python install; may have '
1906 1911 b'connectivity issues with some servers\n'
1907 1912 )
1908 1913 )
1909 1914
1910 1915 fm.plain(
1911 1916 _(
1912 1917 b"checking Rust extensions (%s)\n"
1913 1918 % (b'missing' if rustext is None else b'installed')
1914 1919 ),
1915 1920 )
1916 1921
1917 1922 # TODO print CA cert info
1918 1923
1919 1924 # hg version
1920 1925 hgver = util.version()
1921 1926 fm.write(
1922 1927 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1923 1928 )
1924 1929 fm.write(
1925 1930 b'hgverextra',
1926 1931 _(b"checking Mercurial custom build (%s)\n"),
1927 1932 b'+'.join(hgver.split(b'+')[1:]),
1928 1933 )
1929 1934
1930 1935 # compiled modules
1931 1936 hgmodules = None
1932 1937 if util.safehasattr(sys.modules[__name__], '__file__'):
1933 1938 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1934 1939 elif getattr(sys, 'oxidized', False):
1935 1940 hgmodules = pycompat.sysexecutable
1936 1941
1937 1942 fm.write(
1938 1943 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1939 1944 )
1940 1945 fm.write(
1941 1946 b'hgmodules',
1942 1947 _(b"checking installed modules (%s)...\n"),
1943 1948 hgmodules or _(b"unknown"),
1944 1949 )
1945 1950
1946 1951 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1947 1952 rustext = rustandc # for now, that's the only case
1948 1953 cext = policy.policy in (b'c', b'allow') or rustandc
1949 1954 nopure = cext or rustext
1950 1955 if nopure:
1951 1956 err = None
1952 1957 try:
1953 1958 if cext:
1954 1959 from .cext import ( # pytype: disable=import-error
1955 1960 base85,
1956 1961 bdiff,
1957 1962 mpatch,
1958 1963 osutil,
1959 1964 )
1960 1965
1961 1966 # quiet pyflakes
1962 1967 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1963 1968 if rustext:
1964 1969 from .rustext import ( # pytype: disable=import-error
1965 1970 ancestor,
1966 1971 dirstate,
1967 1972 )
1968 1973
1969 1974 dir(ancestor), dir(dirstate) # quiet pyflakes
1970 1975 except Exception as inst:
1971 1976 err = stringutil.forcebytestr(inst)
1972 1977 problems += 1
1973 1978 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1974 1979
1975 1980 compengines = util.compengines._engines.values()
1976 1981 fm.write(
1977 1982 b'compengines',
1978 1983 _(b'checking registered compression engines (%s)\n'),
1979 1984 fm.formatlist(
1980 1985 sorted(e.name() for e in compengines),
1981 1986 name=b'compengine',
1982 1987 fmt=b'%s',
1983 1988 sep=b', ',
1984 1989 ),
1985 1990 )
1986 1991 fm.write(
1987 1992 b'compenginesavail',
1988 1993 _(b'checking available compression engines (%s)\n'),
1989 1994 fm.formatlist(
1990 1995 sorted(e.name() for e in compengines if e.available()),
1991 1996 name=b'compengine',
1992 1997 fmt=b'%s',
1993 1998 sep=b', ',
1994 1999 ),
1995 2000 )
1996 2001 wirecompengines = compression.compengines.supportedwireengines(
1997 2002 compression.SERVERROLE
1998 2003 )
1999 2004 fm.write(
2000 2005 b'compenginesserver',
2001 2006 _(
2002 2007 b'checking available compression engines '
2003 2008 b'for wire protocol (%s)\n'
2004 2009 ),
2005 2010 fm.formatlist(
2006 2011 [e.name() for e in wirecompengines if e.wireprotosupport()],
2007 2012 name=b'compengine',
2008 2013 fmt=b'%s',
2009 2014 sep=b', ',
2010 2015 ),
2011 2016 )
2012 2017 re2 = b'missing'
2013 2018 if util._re2:
2014 2019 re2 = b'available'
2015 2020 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2016 2021 fm.data(re2=bool(util._re2))
2017 2022
2018 2023 # templates
2019 2024 p = templater.templatedir()
2020 2025 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2021 2026 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2022 2027 if p:
2023 2028 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2024 2029 if m:
2025 2030 # template found, check if it is working
2026 2031 err = None
2027 2032 try:
2028 2033 templater.templater.frommapfile(m)
2029 2034 except Exception as inst:
2030 2035 err = stringutil.forcebytestr(inst)
2031 2036 p = None
2032 2037 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2033 2038 else:
2034 2039 p = None
2035 2040 fm.condwrite(
2036 2041 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2037 2042 )
2038 2043 fm.condwrite(
2039 2044 not m,
2040 2045 b'defaulttemplatenotfound',
2041 2046 _(b" template '%s' not found\n"),
2042 2047 b"default",
2043 2048 )
2044 2049 if not p:
2045 2050 problems += 1
2046 2051 fm.condwrite(
2047 2052 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2048 2053 )
2049 2054
2050 2055 # editor
2051 2056 editor = ui.geteditor()
2052 2057 editor = util.expandpath(editor)
2053 2058 editorbin = procutil.shellsplit(editor)[0]
2054 2059 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2055 2060 cmdpath = procutil.findexe(editorbin)
2056 2061 fm.condwrite(
2057 2062 not cmdpath and editor == b'vi',
2058 2063 b'vinotfound',
2059 2064 _(
2060 2065 b" No commit editor set and can't find %s in PATH\n"
2061 2066 b" (specify a commit editor in your configuration"
2062 2067 b" file)\n"
2063 2068 ),
2064 2069 not cmdpath and editor == b'vi' and editorbin,
2065 2070 )
2066 2071 fm.condwrite(
2067 2072 not cmdpath and editor != b'vi',
2068 2073 b'editornotfound',
2069 2074 _(
2070 2075 b" Can't find editor '%s' in PATH\n"
2071 2076 b" (specify a commit editor in your configuration"
2072 2077 b" file)\n"
2073 2078 ),
2074 2079 not cmdpath and editorbin,
2075 2080 )
2076 2081 if not cmdpath and editor != b'vi':
2077 2082 problems += 1
2078 2083
2079 2084 # check username
2080 2085 username = None
2081 2086 err = None
2082 2087 try:
2083 2088 username = ui.username()
2084 2089 except error.Abort as e:
2085 2090 err = e.message
2086 2091 problems += 1
2087 2092
2088 2093 fm.condwrite(
2089 2094 username, b'username', _(b"checking username (%s)\n"), username
2090 2095 )
2091 2096 fm.condwrite(
2092 2097 err,
2093 2098 b'usernameerror',
2094 2099 _(
2095 2100 b"checking username...\n %s\n"
2096 2101 b" (specify a username in your configuration file)\n"
2097 2102 ),
2098 2103 err,
2099 2104 )
2100 2105
2101 2106 for name, mod in extensions.extensions():
2102 2107 handler = getattr(mod, 'debuginstall', None)
2103 2108 if handler is not None:
2104 2109 problems += handler(ui, fm)
2105 2110
2106 2111 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2107 2112 if not problems:
2108 2113 fm.data(problems=problems)
2109 2114 fm.condwrite(
2110 2115 problems,
2111 2116 b'problems',
2112 2117 _(b"%d problems detected, please check your install!\n"),
2113 2118 problems,
2114 2119 )
2115 2120 fm.end()
2116 2121
2117 2122 return problems
2118 2123
2119 2124
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Ask the peer about every node in one round trip, then print a single
    # line with one digit per queried id.
    flags = peer.known([bin(node_id) for node_id in ids])
    digits = [b"1" if known else b"0" for known in flags]
    ui.write(b"%s\n" % b"".join(digits))
2133 2138
2134 2139
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias kept only so that old shell completion scripts keep
    # working; the actual implementation is debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2139 2144
2140 2145
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced freeing: unconditionally remove the lock file(s) without
    # checking whether a live process still owns them -- hence the
    # "DANGEROUS" labels on the options above.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        # Acquire with wait=False so an already-held lock becomes an
        # immediate, user-visible error instead of a silent wait.
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    # interactive session: hold the locks until the user
                    # confirms the prompt
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    # non-interactive: hold the locks until interrupted
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # always release whatever was successfully acquired
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Report the state of one lock file; return 1 if held, 0 if free."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it ourselves, so nobody else was holding it
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    # only describe the pid as a local process when the
                    # lock was taken on this very host
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished: treat as free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2264 2269
2265 2270
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache of the root manifest storage; not every
        # revlog implementation has one, hence the AttributeError fallback.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # default mode: display the current content of the cache
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2339 2344
2340 2345
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # with --verbose, first report which on-disk record format (v1/v2)
        # would be selected, before printing the state itself
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable rendering; users may override with -T
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two merged commits (local/other), with optional merge labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records; the tuple layout of ms._state[f] depends on
    # the record type stored in state[0]
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras attached to files that have no merge record of their own
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2448 2453
2449 2454
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect names from every namespace except 'branches'; branches are
    # gathered separately below so that only open ones are offered.
    candidates = set()
    for ns_name, ns in repo.names.items():
        if ns_name == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)
    # an empty argument list means "complete everything"
    prefixes = args or [b'']
    matches = {
        candidate
        for prefix in prefixes
        for candidate in candidates
        if candidate.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2472 2477
2473 2478
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # Without a mode flag there is nothing to do (matches the historical
    # behavior of the all-elif version of this command).
    if not (
        opts['dump_new']
        or opts['dump_disk']
        or opts['check']
        or opts['metadata']
    ):
        return
    # Every mode operates on the unfiltered changelog, so hoist the shared
    # setup that used to be duplicated in each branch.
    unfi = repo.unfiltered()
    cl = unfi.changelog
    if opts['dump_new']:
        # prefer the index's own serializer when available (rust/c index)
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2535 2540
2536 2541
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full-length hex node id without resolving it against the
        # local repository, so markers may reference unknown changesets.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by index and return early
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker obsoleting `precursor` with the
        # given successors, inside a lock + transaction
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # recording parents requires the precursor to be known
                    # locally, since we read its parents from the repo
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                # tr.release() aborts the transaction unless close() ran
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally filtered by --rev
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2686 2691
2687 2692
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    rev_ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # one "source -> destination" line per copy recorded against p1
    for destination, source in rev_ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, destination))
2700 2705
2701 2706
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # NOTE(review): this function was previously (mis)named debugp1copies,
    # shadowing the real debugp1copies at module level. The command itself
    # is registered through the decorator's b'debugp2copies' name, so the
    # rename does not affect the CLI.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # one "source -> destination" line per copy recorded against p2
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2714 2719
2715 2720
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec. `acceptable` is
        # the set of dirstate state characters a file must match.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # a spec outside the repository cannot match anything
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # make the spec relative to the repo root
        spec = spec[len(rootdir) :]
        # dirstate stores '/'-separated paths; normalize on platforms with
        # a different separator so prefix matching works
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator: report
                # the directory prefix instead of the whole file path
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # no filter options given -> accept every dirstate state ('nmar')
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2784 2789
2785 2790
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # resolve both revisions, build a matcher from the patterns, then print
    # every copy as "source -> destination", sorted by destination
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    copy_map = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for dst, src in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2799 2804
2800 2805
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Turn on peer request logging unconditionally; the log lines only
    # show up when --debug is in effect.
    logging_override = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        can_push = peer.canpush()

        yes, no = _(b'yes'), _(b'no')
        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (yes if is_local else no))
        ui.write(_(b'pushable: %s\n') % (yes if can_push else no))
    finally:
        # always close the connection, even if a query above failed
        peer.close()
2824 2829
2825 2830
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool is mapped onto ui.forcemerge, the same override hg merge uses
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress _picktool's own output unless --debug is in effect,
            # so the FILE = MERGETOOL lines stay machine-readable.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2910 2915
2911 2916
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # push mode: REPO NAMESPACE KEY OLD NEW
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # pushkey result is truthy on success; invert for the exit code
            # (0 == success by shell convention)
            return not r
        else:
            # list mode: dump every key/value pair in the namespace
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
2947 2952
2948 2953
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvec (parent vector) fingerprints of two revisions

    Prints both vectors, their depths, and the relation computed between
    them: ``=`` (equal), ``>`` / ``<`` (ancestor ordering), ``|``
    (unrelated), or ``?`` if no comparison matched.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously `rel` was left unbound when none of the comparisons
        # matched, raising UnboundLocalError in the format below. Report
        # an unknown relation instead of crashing.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2975 2980
2976 2981
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        # NB: opts was not run through byteskwargs here, so the key is a
        # native str, not bytes.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # files in the manifest but missing from the dirstate
            manifestonly = manifestfiles - dirstatefiles
            # tracked files absent from the manifest, minus explicit adds
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        # changedfiles=None means "rebuild everything"
        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3024 3029
3025 3030
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # delegate the actual work to the repair module
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3042 3047
3043 3048
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source node) or a falsy value
        origin = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(path)
        if not origin:
            ui.write(_(b"%s not renamed\n") % rel)
        else:
            src_path, src_node = origin
            ui.write(
                _(b"%s renamed from %s:%s\n") % (rel, src_path, hex(src_node))
            )
3063 3068
3064 3069
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement name per line, in sorted order
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3070 3075
3071 3076
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With --dump, emit one machine-readable line of index data per
    revision instead of the aggregated statistics.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: raw per-revision table, then return.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # full revision: treat itself as its delta base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a rev's parents stop being heads once the rev is seen
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each is a [min, max, total] accumulator
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into the [min, max, total] accumulator `l`
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full snapshot (no delta base)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # delta revision: extend the base's chain
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte identifies the compression engine
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # convert each accumulator's total slot into an average
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # integer format sized to the widest expected value
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # integer + percentage format sized to the widest expected value
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percent-of-total) pair for the pcfmtstr formats
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # label for a compression-engine marker byte
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3426 3431
3427 3432
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    Two output layouts are supported via -f: format 0 (default) and
    format 1; -v/--verbose adds offset/length/size columns.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full hashes with --debug, short ones otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents when lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3541 3546
3542 3547
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # ordered pipeline of tree transformations; each stage feeds the next
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the results
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-diff-style listing of the mismatching revisions
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3674 3679
3675 3680
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # optional unbuffered file object that receives a log of server I/O
    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3724 3729
3725 3730
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # rev2 defaults to the null revision when omitted
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3753 3758
3754 3759
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # with -c/-m/--dir there is no FILE argument, so the first
        # positional is actually the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # unwrap to the underlying revlog when the storage object wraps one
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # stable, key-sorted listing of the sidedata entries
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3781 3786
3782 3787
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Map supported schemes to the port used when the URL omits one.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12.
    # Build an explicit SSLContext instead. Verification is intentionally
    # disabled: we only need the peer's certificate bytes, not a validated
    # connection.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # Fetch the peer certificate in DER form for the Windows chain check.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3854 3859
3855 3860
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect the bundle files that strip left behind, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # These keys are expected by the incoming-style machinery used below.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from chlist, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundles whose parents were stripped cannot be opened; warn
            # and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # getremotechanges produces incoming-style output; silence it here.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # Stop at the first bundle containing the node.
                        break
            else:
                # Listing mode: print the backup's timestamp header, then
                # the changesets it contains.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    # Compact one-line-per-changeset template when not
                    # verbose.
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3996 4001
3997 4002
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the subrepository state (path, source, revision) recorded for
    # the requested revision, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4009 4014
4010 4015
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Make ui and repo directly available to the interactive session.
    namespace = {'ui': ui, 'repo': repo}
    code.interact(local=namespace)
4026 4031
4027 4032
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls to avoid recomputation.
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # A pruned changeset yields an empty set: print a bare newline.
            if succsset:
                rendered = b' '.join(node2str(node) for node in succsset)
                ui.write(b' ' + rendered)
            ui.write(b'\n')
4082 4087
4083 4088
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        # computemissing=False: only read what is already cached; never
        # compute a missing entry, so None means "no cached value".
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            tagsnodedisplay = hex(tagsnode)
            # The cached fnode may reference a .hgtags revision that does
            # not actually exist in the filelog; flag it.
            if not flog.hasnode(tagsnode):
                tagsnodedisplay += b' (unknown node)'
        elif tagsnode is None:
            tagsnodedisplay = b'missing'
        else:
            # Falsy but not None — presumably a null/empty entry recorded
            # on disk; treated as an invalid cache slot. TODO confirm
            # against hgtagsfnodescache.getfnode().
            tagsnodedisplay = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4102 4107
4103 4108
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Turn -D KEY=VALUE definitions into template properties.
    props = {}
    for definition in opts['define']:
        try:
            key, value = (part.strip() for part in definition.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
            props[key] = value
        except ValueError:
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        expanded = templater.expandaliases(tree, aliases)
        if expanded != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(expanded), b'\n'
            )

    if revs is None:
        # Generic template: render once using repository-level resources.
        resources = formatter.templateresources(ui, repo)
        tmplobj = formatter.maketemplater(ui, tmpl, resources=resources)
        if ui.verbose:
            keywords, functions = tmplobj.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(keywords)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(functions)))
        ui.write(tmplobj.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            keywords, functions = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(keywords)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(functions)))
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
4167 4172
4168 4173
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # Substitute a placeholder when no response was obtained.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4183 4188
4184 4189
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the user typed at the prompt.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4197 4202
4198 4203
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks while rewriting caches.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4204 4209
4205 4210
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before delegating.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4255 4260
4256 4261
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    matched = list(repo[None].walk(matcher))
    if not matched:
        return
    # Honor ui.slash on platforms whose native separator is not '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        normalize = util.normpath
    else:
        normalize = lambda fn: fn
    relpaths = {fname: repo.pathto(fname) for fname in matched}
    # Column widths are sized to the longest absolute and relative paths.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in matched),
        max(len(rel) for rel in relpaths.values()),
    )
    for fname in matched:
        flag = b'exact' if matcher.exact(fname) else b''
        line = fmt % (fname, normalize(relpaths[fname]), flag)
        ui.write(b"%s\n" % line.rstrip())
4283 4288
4284 4289
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Render the divergent changesets (if any) with their phases,
        # followed by a trailing space separator.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = b' '.join(
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            )
            dnodes = rendered + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4302 4307
4303 4308
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only the command-specific
        # arguments with non-empty values are forwarded to the peer.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = {k: v for k, v in opts.items() if v}
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        first = repo.debugwireargs(*vals, **args)
        second = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        repo.close()
4334 4339
4335 4340
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini language into (action, lines) pairs.

    Yields two-element tuples: the action line (no leading whitespace) and
    the list of its indented payload lines. A payload line indented deeper
    than its predecessor is treated as a continuation and concatenated onto
    the previous payload line.
    """
    action = None
    payload = []
    previndent = 0

    for raw in fh:
        line = raw.rstrip()
        # Blank lines and comments are ignored entirely.
        if not line or line.startswith(b'#'):
            continue

        if not line.startswith(b' '):
            # A non-indented line starts a new block; flush the current one.
            if action:
                yield action, payload
            action = line
            payload = []
            previndent = 0
            continue

        if not action:
            raise error.Abort(_(b'indented line outside of block'))

        indent = len(line) - len(line.lstrip())
        if indent > previndent and payload:
            # Deeper indentation continues the previous payload line.
            payload[-1] += line.lstrip()
        else:
            payload.append(line)
            previndent = indent

    # Flush the final block, if any.
    if action:
        yield action, payload
4376 4381
4377 4382
4378 4383 @command(
4379 4384 b'debugwireproto',
4380 4385 [
4381 4386 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4382 4387 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4383 4388 (
4384 4389 b'',
4385 4390 b'noreadstderr',
4386 4391 False,
4387 4392 _(b'do not read from stderr of the remote'),
4388 4393 ),
4389 4394 (
4390 4395 b'',
4391 4396 b'nologhandshake',
4392 4397 False,
4393 4398 _(b'do not log I/O related to the peer handshake'),
4394 4399 ),
4395 4400 ]
4396 4401 + cmdutil.remoteopts,
4397 4402 _(b'[PATH]'),
4398 4403 optionalrepo=True,
4399 4404 )
4400 4405 def debugwireproto(ui, repo, path=None, **opts):
4401 4406 """send wire protocol commands to a server
4402 4407
4403 4408 This command can be used to issue wire protocol commands to remote
4404 4409 peers and to debug the raw data being exchanged.
4405 4410
4406 4411 ``--localssh`` will start an SSH server against the current repository
4407 4412 and connect to that. By default, the connection will perform a handshake
4408 4413 and establish an appropriate peer instance.
4409 4414
4410 4415 ``--peer`` can be used to bypass the handshake protocol and construct a
4411 4416 peer instance using the specified class type. Valid values are ``raw``,
4412 4417 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4413 4418 don't support higher-level command actions.
4414 4419
4415 4420 ``--noreadstderr`` can be used to disable automatic reading from stderr
4416 4421 of the peer (for SSH connections only). Disabling automatic reading of
4417 4422 stderr is useful for making output more deterministic.
4418 4423
4419 4424 Commands are issued via a mini language which is specified via stdin.
4420 4425 The language consists of individual actions to perform. An action is
4421 4426 defined by a block. A block is defined as a line with no leading
4422 4427 space followed by 0 or more lines with leading space. Blocks are
4423 4428 effectively a high-level command with additional metadata.
4424 4429
4425 4430 Lines beginning with ``#`` are ignored.
4426 4431
4427 4432 The following sections denote available actions.
4428 4433
4429 4434 raw
4430 4435 ---
4431 4436
4432 4437 Send raw data to the server.
4433 4438
4434 4439 The block payload contains the raw data to send as one atomic send
4435 4440 operation. The data may not actually be delivered in a single system
4436 4441 call: it depends on the abilities of the transport being used.
4437 4442
4438 4443 Each line in the block is de-indented and concatenated. Then, that
4439 4444 value is evaluated as a Python b'' literal. This allows the use of
4440 4445 backslash escaping, etc.
4441 4446
4442 4447 raw+
4443 4448 ----
4444 4449
4445 4450 Behaves like ``raw`` except flushes output afterwards.
4446 4451
4447 4452 command <X>
4448 4453 -----------
4449 4454
4450 4455 Send a request to run a named command, whose name follows the ``command``
4451 4456 string.
4452 4457
4453 4458 Arguments to the command are defined as lines in this block. The format of
4454 4459 each line is ``<key> <value>``. e.g.::
4455 4460
4456 4461 command listkeys
4457 4462 namespace bookmarks
4458 4463
4459 4464 If the value begins with ``eval:``, it will be interpreted as a Python
4460 4465 literal expression. Otherwise values are interpreted as Python b'' literals.
4461 4466 This allows sending complex types and encoding special byte sequences via
4462 4467 backslash escaping.
4463 4468
4464 4469 The following arguments have special meaning:
4465 4470
4466 4471 ``PUSHFILE``
4467 4472 When defined, the *push* mechanism of the peer will be used instead
4468 4473 of the static request-response mechanism and the content of the
4469 4474 file specified in the value of this argument will be sent as the
4470 4475 command payload.
4471 4476
4472 4477 This can be used to submit a local bundle file to the remote.
4473 4478
4474 4479 batchbegin
4475 4480 ----------
4476 4481
4477 4482 Instruct the peer to begin a batched send.
4478 4483
4479 4484 All ``command`` blocks are queued for execution until the next
4480 4485 ``batchsubmit`` block.
4481 4486
4482 4487 batchsubmit
4483 4488 -----------
4484 4489
4485 4490 Submit previously queued ``command`` blocks as a batch request.
4486 4491
4487 4492 This action MUST be paired with a ``batchbegin`` action.
4488 4493
4489 4494 httprequest <method> <path>
4490 4495 ---------------------------
4491 4496
4492 4497 (HTTP peer only)
4493 4498
4494 4499 Send an HTTP request to the peer.
4495 4500
4496 4501 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4497 4502
4498 4503 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4499 4504 headers to add to the request. e.g. ``Accept: foo``.
4500 4505
4501 4506 The following arguments are special:
4502 4507
4503 4508 ``BODYFILE``
4504 4509 The content of the file defined as the value to this argument will be
4505 4510 transferred verbatim as the HTTP request body.
4506 4511
4507 4512 ``frame <type> <flags> <payload>``
4508 4513 Send a unified protocol frame as part of the request body.
4509 4514
4510 4515 All frames will be collected and sent as the body to the HTTP
4511 4516 request.
4512 4517
4513 4518 close
4514 4519 -----
4515 4520
4516 4521 Close the connection to the server.
4517 4522
4518 4523 flush
4519 4524 -----
4520 4525
4521 4526 Flush data written to the server.
4522 4527
4523 4528 readavailable
4524 4529 -------------
4525 4530
4526 4531 Close the write end of the connection and read all available data from
4527 4532 the server.
4528 4533
4529 4534 If the connection to the server encompasses multiple pipes, we poll both
4530 4535 pipes and read available data.
4531 4536
4532 4537 readline
4533 4538 --------
4534 4539
4535 4540 Read a line of output from the server. If there are multiple output
4536 4541 pipes, reads only the main pipe.
4537 4542
4538 4543 ereadline
4539 4544 ---------
4540 4545
4541 4546 Like ``readline``, but read from the stderr pipe, if available.
4542 4547
4543 4548 read <X>
4544 4549 --------
4545 4550
4546 4551 ``read()`` N bytes from the server's main output pipe.
4547 4552
4548 4553 eread <X>
4549 4554 ---------
4550 4555
4551 4556 ``read()`` N bytes from the server's stderr pipe, if available.
4552 4557
4553 4558 Specifying Unified Frame-Based Protocol Frames
4554 4559 ----------------------------------------------
4555 4560
4556 4561 It is possible to emit a *Unified Frame-Based Protocol* by using special
4557 4562 syntax.
4558 4563
4559 4564 A frame is composed as a type, flags, and payload. These can be parsed
4560 4565 from a string of the form:
4561 4566
4562 4567 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4563 4568
4564 4569 ``request-id`` and ``stream-id`` are integers defining the request and
4565 4570 stream identifiers.
4566 4571
4567 4572 ``type`` can be an integer value for the frame type or the string name
4568 4573 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4569 4574 ``command-name``.
4570 4575
4571 4576 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4572 4577 components. Each component (and there can be just one) can be an integer
4573 4578 or a flag name for stream flags or frame flags, respectively. Values are
4574 4579 resolved to integers and then bitwise OR'd together.
4575 4580
4576 4581 ``payload`` represents the raw frame payload. If it begins with
4577 4582 ``cbor:``, the following string is evaluated as Python code and the
4578 4583 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4579 4584 as a Python byte string literal.
4580 4585 """
4581 4586 opts = pycompat.byteskwargs(opts)
4582 4587
4583 4588 if opts[b'localssh'] and not repo:
4584 4589 raise error.Abort(_(b'--localssh requires a repository'))
4585 4590
4586 4591 if opts[b'peer'] and opts[b'peer'] not in (
4587 4592 b'raw',
4588 4593 b'ssh1',
4589 4594 ):
4590 4595 raise error.Abort(
4591 4596 _(b'invalid value for --peer'),
4592 4597 hint=_(b'valid values are "raw" and "ssh1"'),
4593 4598 )
4594 4599
4595 4600 if path and opts[b'localssh']:
4596 4601 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4597 4602
4598 4603 if ui.interactive():
4599 4604 ui.write(_(b'(waiting for commands on stdin)\n'))
4600 4605
4601 4606 blocks = list(_parsewirelangblocks(ui.fin))
4602 4607
4603 4608 proc = None
4604 4609 stdin = None
4605 4610 stdout = None
4606 4611 stderr = None
4607 4612 opener = None
4608 4613
4609 4614 if opts[b'localssh']:
4610 4615 # We start the SSH server in its own process so there is process
4611 4616 # separation. This prevents a whole class of potential bugs around
4612 4617 # shared state from interfering with server operation.
4613 4618 args = procutil.hgcmd() + [
4614 4619 b'-R',
4615 4620 repo.root,
4616 4621 b'debugserve',
4617 4622 b'--sshstdio',
4618 4623 ]
4619 4624 proc = subprocess.Popen(
4620 4625 pycompat.rapply(procutil.tonativestr, args),
4621 4626 stdin=subprocess.PIPE,
4622 4627 stdout=subprocess.PIPE,
4623 4628 stderr=subprocess.PIPE,
4624 4629 bufsize=0,
4625 4630 )
4626 4631
4627 4632 stdin = proc.stdin
4628 4633 stdout = proc.stdout
4629 4634 stderr = proc.stderr
4630 4635
4631 4636 # We turn the pipes into observers so we can log I/O.
4632 4637 if ui.verbose or opts[b'peer'] == b'raw':
4633 4638 stdin = util.makeloggingfileobject(
4634 4639 ui, proc.stdin, b'i', logdata=True
4635 4640 )
4636 4641 stdout = util.makeloggingfileobject(
4637 4642 ui, proc.stdout, b'o', logdata=True
4638 4643 )
4639 4644 stderr = util.makeloggingfileobject(
4640 4645 ui, proc.stderr, b'e', logdata=True
4641 4646 )
4642 4647
4643 4648 # --localssh also implies the peer connection settings.
4644 4649
4645 4650 url = b'ssh://localserver'
4646 4651 autoreadstderr = not opts[b'noreadstderr']
4647 4652
4648 4653 if opts[b'peer'] == b'ssh1':
4649 4654 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4650 4655 peer = sshpeer.sshv1peer(
4651 4656 ui,
4652 4657 url,
4653 4658 proc,
4654 4659 stdin,
4655 4660 stdout,
4656 4661 stderr,
4657 4662 None,
4658 4663 autoreadstderr=autoreadstderr,
4659 4664 )
4660 4665 elif opts[b'peer'] == b'raw':
4661 4666 ui.write(_(b'using raw connection to peer\n'))
4662 4667 peer = None
4663 4668 else:
4664 4669 ui.write(_(b'creating ssh peer from handshake results\n'))
4665 4670 peer = sshpeer.makepeer(
4666 4671 ui,
4667 4672 url,
4668 4673 proc,
4669 4674 stdin,
4670 4675 stdout,
4671 4676 stderr,
4672 4677 autoreadstderr=autoreadstderr,
4673 4678 )
4674 4679
4675 4680 elif path:
4676 4681 # We bypass hg.peer() so we can proxy the sockets.
4677 4682 # TODO consider not doing this because we skip
4678 4683 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4679 4684 u = urlutil.url(path)
4680 4685 if u.scheme != b'http':
4681 4686 raise error.Abort(_(b'only http:// paths are currently supported'))
4682 4687
4683 4688 url, authinfo = u.authinfo()
4684 4689 openerargs = {
4685 4690 'useragent': b'Mercurial debugwireproto',
4686 4691 }
4687 4692
4688 4693 # Turn pipes/sockets into observers so we can log I/O.
4689 4694 if ui.verbose:
4690 4695 openerargs.update(
4691 4696 {
4692 4697 'loggingfh': ui,
4693 4698 'loggingname': b's',
4694 4699 'loggingopts': {
4695 4700 'logdata': True,
4696 4701 'logdataapis': False,
4697 4702 },
4698 4703 }
4699 4704 )
4700 4705
4701 4706 if ui.debugflag:
4702 4707 openerargs['loggingopts']['logdataapis'] = True
4703 4708
4704 4709 # Don't send default headers when in raw mode. This allows us to
4705 4710 # bypass most of the behavior of our URL handling code so we can
4706 4711 # have near complete control over what's sent on the wire.
4707 4712 if opts[b'peer'] == b'raw':
4708 4713 openerargs['sendaccept'] = False
4709 4714
4710 4715 opener = urlmod.opener(ui, authinfo, **openerargs)
4711 4716
4712 4717 if opts[b'peer'] == b'raw':
4713 4718 ui.write(_(b'using raw connection to peer\n'))
4714 4719 peer = None
4715 4720 elif opts[b'peer']:
4716 4721 raise error.Abort(
4717 4722 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4718 4723 )
4719 4724 else:
4720 4725 peer = httppeer.makepeer(ui, path, opener=opener)
4721 4726
4722 4727 # We /could/ populate stdin/stdout with sock.makefile()...
4723 4728 else:
4724 4729 raise error.Abort(_(b'unsupported connection configuration'))
4725 4730
4726 4731 batchedcommands = None
4727 4732
4728 4733 # Now perform actions based on the parsed wire language instructions.
4729 4734 for action, lines in blocks:
4730 4735 if action in (b'raw', b'raw+'):
4731 4736 if not stdin:
4732 4737 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4733 4738
4734 4739 # Concatenate the data together.
4735 4740 data = b''.join(l.lstrip() for l in lines)
4736 4741 data = stringutil.unescapestr(data)
4737 4742 stdin.write(data)
4738 4743
4739 4744 if action == b'raw+':
4740 4745 stdin.flush()
4741 4746 elif action == b'flush':
4742 4747 if not stdin:
4743 4748 raise error.Abort(_(b'cannot call flush on this peer'))
4744 4749 stdin.flush()
4745 4750 elif action.startswith(b'command'):
4746 4751 if not peer:
4747 4752 raise error.Abort(
4748 4753 _(
4749 4754 b'cannot send commands unless peer instance '
4750 4755 b'is available'
4751 4756 )
4752 4757 )
4753 4758
4754 4759 command = action.split(b' ', 1)[1]
4755 4760
4756 4761 args = {}
4757 4762 for line in lines:
4758 4763 # We need to allow empty values.
4759 4764 fields = line.lstrip().split(b' ', 1)
4760 4765 if len(fields) == 1:
4761 4766 key = fields[0]
4762 4767 value = b''
4763 4768 else:
4764 4769 key, value = fields
4765 4770
4766 4771 if value.startswith(b'eval:'):
4767 4772 value = stringutil.evalpythonliteral(value[5:])
4768 4773 else:
4769 4774 value = stringutil.unescapestr(value)
4770 4775
4771 4776 args[key] = value
4772 4777
4773 4778 if batchedcommands is not None:
4774 4779 batchedcommands.append((command, args))
4775 4780 continue
4776 4781
4777 4782 ui.status(_(b'sending %s command\n') % command)
4778 4783
4779 4784 if b'PUSHFILE' in args:
4780 4785 with open(args[b'PUSHFILE'], 'rb') as fh:
4781 4786 del args[b'PUSHFILE']
4782 4787 res, output = peer._callpush(
4783 4788 command, fh, **pycompat.strkwargs(args)
4784 4789 )
4785 4790 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4786 4791 ui.status(
4787 4792 _(b'remote output: %s\n') % stringutil.escapestr(output)
4788 4793 )
4789 4794 else:
4790 4795 with peer.commandexecutor() as e:
4791 4796 res = e.callcommand(command, args).result()
4792 4797
4793 4798 ui.status(
4794 4799 _(b'response: %s\n')
4795 4800 % stringutil.pprint(res, bprefix=True, indent=2)
4796 4801 )
4797 4802
4798 4803 elif action == b'batchbegin':
4799 4804 if batchedcommands is not None:
4800 4805 raise error.Abort(_(b'nested batchbegin not allowed'))
4801 4806
4802 4807 batchedcommands = []
4803 4808 elif action == b'batchsubmit':
4804 4809 # There is a batching API we could go through. But it would be
4805 4810 # difficult to normalize requests into function calls. It is easier
4806 4811 # to bypass this layer and normalize to commands + args.
4807 4812 ui.status(
4808 4813 _(b'sending batch with %d sub-commands\n')
4809 4814 % len(batchedcommands)
4810 4815 )
4811 4816 assert peer is not None
4812 4817 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4813 4818 ui.status(
4814 4819 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4815 4820 )
4816 4821
4817 4822 batchedcommands = None
4818 4823
4819 4824 elif action.startswith(b'httprequest '):
4820 4825 if not opener:
4821 4826 raise error.Abort(
4822 4827 _(b'cannot use httprequest without an HTTP peer')
4823 4828 )
4824 4829
4825 4830 request = action.split(b' ', 2)
4826 4831 if len(request) != 3:
4827 4832 raise error.Abort(
4828 4833 _(
4829 4834 b'invalid httprequest: expected format is '
4830 4835 b'"httprequest <method> <path>'
4831 4836 )
4832 4837 )
4833 4838
4834 4839 method, httppath = request[1:]
4835 4840 headers = {}
4836 4841 body = None
4837 4842 frames = []
4838 4843 for line in lines:
4839 4844 line = line.lstrip()
4840 4845 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4841 4846 if m:
4842 4847 # Headers need to use native strings.
4843 4848 key = pycompat.strurl(m.group(1))
4844 4849 value = pycompat.strurl(m.group(2))
4845 4850 headers[key] = value
4846 4851 continue
4847 4852
4848 4853 if line.startswith(b'BODYFILE '):
4849 4854 with open(line.split(b' ', 1), b'rb') as fh:
4850 4855 body = fh.read()
4851 4856 elif line.startswith(b'frame '):
4852 4857 frame = wireprotoframing.makeframefromhumanstring(
4853 4858 line[len(b'frame ') :]
4854 4859 )
4855 4860
4856 4861 frames.append(frame)
4857 4862 else:
4858 4863 raise error.Abort(
4859 4864 _(b'unknown argument to httprequest: %s') % line
4860 4865 )
4861 4866
4862 4867 url = path + httppath
4863 4868
4864 4869 if frames:
4865 4870 body = b''.join(bytes(f) for f in frames)
4866 4871
4867 4872 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4868 4873
4869 4874 # urllib.Request insists on using has_data() as a proxy for
4870 4875 # determining the request method. Override that to use our
4871 4876 # explicitly requested method.
4872 4877 req.get_method = lambda: pycompat.sysstr(method)
4873 4878
4874 4879 try:
4875 4880 res = opener.open(req)
4876 4881 body = res.read()
4877 4882 except util.urlerr.urlerror as e:
4878 4883 # read() method must be called, but only exists in Python 2
4879 4884 getattr(e, 'read', lambda: None)()
4880 4885 continue
4881 4886
4882 4887 ct = res.headers.get('Content-Type')
4883 4888 if ct == 'application/mercurial-cbor':
4884 4889 ui.write(
4885 4890 _(b'cbor> %s\n')
4886 4891 % stringutil.pprint(
4887 4892 cborutil.decodeall(body), bprefix=True, indent=2
4888 4893 )
4889 4894 )
4890 4895
4891 4896 elif action == b'close':
4892 4897 assert peer is not None
4893 4898 peer.close()
4894 4899 elif action == b'readavailable':
4895 4900 if not stdout or not stderr:
4896 4901 raise error.Abort(
4897 4902 _(b'readavailable not available on this peer')
4898 4903 )
4899 4904
4900 4905 stdin.close()
4901 4906 stdout.read()
4902 4907 stderr.read()
4903 4908
4904 4909 elif action == b'readline':
4905 4910 if not stdout:
4906 4911 raise error.Abort(_(b'readline not available on this peer'))
4907 4912 stdout.readline()
4908 4913 elif action == b'ereadline':
4909 4914 if not stderr:
4910 4915 raise error.Abort(_(b'ereadline not available on this peer'))
4911 4916 stderr.readline()
4912 4917 elif action.startswith(b'read '):
4913 4918 count = int(action.split(b' ', 1)[1])
4914 4919 if not stdout:
4915 4920 raise error.Abort(_(b'read not available on this peer'))
4916 4921 stdout.read(count)
4917 4922 elif action.startswith(b'eread '):
4918 4923 count = int(action.split(b' ', 1)[1])
4919 4924 if not stderr:
4920 4925 raise error.Abort(_(b'eread not available on this peer'))
4921 4926 stderr.read(count)
4922 4927 else:
4923 4928 raise error.Abort(_(b'unknown action: %s') % action)
4924 4929
4925 4930 if batchedcommands is not None:
4926 4931 raise error.Abort(_(b'unclosed "batchbegin" request'))
4927 4932
4928 4933 if peer:
4929 4934 peer.close()
4930 4935
4931 4936 if proc:
4932 4937 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now