##// END OF EJS Templates
debugdeltachain: document the possible values for deltatype...
marmoute -
r50117:e7d23c51 default
parent child Browse files
Show More
@@ -1,4941 +1,4947 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 vfs as vfsmod,
92 92 wireprotoframing,
93 93 wireprotoserver,
94 94 )
95 95 from .interfaces import repository
96 96 from .utils import (
97 97 cborutil,
98 98 compression,
99 99 dateutil,
100 100 procutil,
101 101 stringutil,
102 102 urlutil,
103 103 )
104 104
105 105 from .revlogutils import (
106 106 constants as revlog_constants,
107 107 deltas as deltautil,
108 108 nodemap,
109 109 rewrite,
110 110 sidedata,
111 111 )
112 112
113 113 release = lockmod.release
114 114
115 115 table = {}
116 116 table.update(strip.command._table)
117 117 command = registrar.command(table)
118 118
119 119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Accept either "INDEX REV1 REV2" (standalone revlog file) or
    # "REV1 REV2" (use the current repository's changelog).
    if len(args) == 3:
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancestor), hex(ancestor)))
139 139
140 140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Fix: vfs paths are bytes throughout Mercurial; the filename must be a
    # b'' literal (a native str here would fail when joined with the bytes
    # path components of cachevfs on Python 3).
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
156 156
157 157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle, parse its header, and replay it into the local repo.
    bundlefile = hg.openpath(ui, fname)
    unbundler = exchange.readbundle(ui, bundlefile, fname)
    unbundler.apply(repo)
164 164
165 165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first of two parses of `text`: this one only counts 'n' events so the
    # progress bar below can be given a total)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the most recently committed node, -1 before any
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # maps dag-local rev id -> committed node id
        id = 0
        progress.update(id)
        # second parse: actually build the commits
        # NOTE: `type` and `id` shadow builtins; kept as-is for history.
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # single file "mf" whose lines merge cleanly across
                    # branches: each rev appends " r<id>" to its own slice
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge parents' copies of "mf"
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # first node: seed from the pre-built line template
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # single file "of" fully rewritten by every rev
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # fresh file "nf<id>" per rev; merges also carry over the
                    # second parent's nf* files so they survive the merge
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file content from the dict
                    # captured above; None means "file absent"
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # translate dag-local parent ids into committed node ids
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for an already-committed node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # switch the named branch for nodes created from here on
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # tags are written outside the transaction, as plain localtags
    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
351 351
352 352
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    # Shared helper for `debugbundle` and `_debugbundle2`: dump a changegroup
    # either with full per-delta details (all=True) or as a bare node list.
    # `indent` shifts the output when nested inside a bundle2 part listing.
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one section header plus a detail line per delta
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # a changegroup is changelog, then manifest, then one group per file
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelogheader() returns {} at end of stream, terminating iter()
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        # terse mode: only the changelog node ids
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
392 392
393 393
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report the unknown on-disk version instead of crashing
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
        fm.end()
416 416
417 417
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    # one line per head, grouped by phase in canonical phase order
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
426 426
427 427
def _quasirepr(thing):
    # repr()-like rendering with deterministic (sorted) key order for
    # mapping types, so bundle output is stable across runs
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
    return b'{%s}' % b', '.join(pairs)
434 434
435 435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering, if any was requested
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known part payloads get a detailed dump unless --quiet
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458 458
459 459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundlespec string, nothing else
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
482 482
483 483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        # bundle2 capabilities are advertised separately, as key/value lists
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        # always release the peer connection
        peer.close()
503 503
504 504
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    # either recompute the file-change metadata, or read it back from the
    # changelog sidedata storage
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        files = None
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            files = metadata.decode_files_sidedata(sd)

    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(files.touched):
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        # copy information, when the file was copied from either parent
        copy_parent = copy_source = b""
        if f in files.copied_from_p1:
            copy_parent, copy_source = b"p1", files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent, copy_source = b"p2", files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
554 554
555 555
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    # verify() yields (format, arg, arg, ...) tuples describing each problem
    problems = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        problems += 1
    if problems:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
569 569
570 570
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors/effects
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
583 583
584 584
def _debugdisplaycolor(ui):
    """print every available color/effect, each rendered in itself"""
    # work on a copy so we can rebuild the style table without touching
    # the caller's ui
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
601 601
602 602
def _debugdisplaystyle(ui):
    """print every configured style and the effects it expands to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad effect lists so they line up in one column
    longest = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            padding = b' ' * (max(0, longest - len(name)))
            rendered = b', '.join(ui.label(e, e) for e in effects.split())
            ui.write(b': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write(b'\n')
616 616
617 617
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        warning = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(warning)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    reqtext = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqtext)
639 639
640 640
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # standalone revlog index: emit its DAG, labeling only the revs
        # explicitly listed on the command line
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) nodes, plus ('l', ...) labels
            # for the requested revs
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # changelog DAG of the current repo, optionally annotated with
        # tags (as labels) and branch switches
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an ('a', branch) event whenever the branch of
                    # the walked revision changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # serialize the event stream into the compact dag text format
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
710 710
711 711
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    wantsrepolog = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if wantsrepolog:
        # with -c/-m/--dir the single positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 727
728 728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # optionally check the parsed timestamp against a date range
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747 747
748 748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base: a full snapshot
                    - snap: an intermediate snapshot
                    - p1: a delta against the first parent
                    - p2: a delta against the second parent
                    - prev: a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # gather per-revision data from the index entry: parents, sizes,
        # delta base classification, and the full delta chain
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # classify the delta base; see the deltatype docstring entry above
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, a revision is either its own base
            # (full text) or a delta against the previous revision
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev      p1      p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chains are numbered in the order their base revision is first seen
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # linear distance: bytes from the chain base's start to this
        # revision's end in the data file
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # guard the ratios against zero-size divisors
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of this chain and measure how much
            # data it would pull in, and in how many chunks
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
939 945
940 946
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 docket metadata instead of entries
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        # unpack the fixed tree-metadata struct; field order must match
        # dirstateutils.v2.TREE_METADATA
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) overrides --dates
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename); entry layout matches debug_iter()
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # -1 means "mtime unknown/invalidated"
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set in the recorded mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1028 1034
1029 1035
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # the hash is stored at the tail of the docket's tree metadata
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1044 1050
1045 1051
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to a real (or configured) remote peer

        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: use the local repo itself as the remote, hidden
        # behind a repoview filter that strips everything outside the
        # requested revisions
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same trick for the local side
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` is populated by the discovery implementations through their
    # `audit` argument (e.g. total-roundtrips, total-queries)
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output (e.g. json): capture what the discovery
        # writes to the ui and emit it as a regular data field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # every revision is either common or missing, never both
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1300 1306
1301 1307
# size in bytes (4 KiB) of the chunks streamed by debugdownload
_chunksize = 4 << 10
1303 1309
1304 1310
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource at ``url`` is fetched through Mercurial's URL handling and
    streamed in ``_chunksize`` chunks to ``--output`` when given, otherwise
    to the ui.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        # stream in fixed-size chunks so arbitrarily large resources never
        # need to fit in memory at once
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # the source handle used to be left for the GC to reclaim; close it
        # explicitly so the underlying connection/file is released promptly
        fh.close()
        if output:
            dest.close()
1327 1333
1328 1334
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; oxidized (single-binary) builds have
        # no __file__, so report the executable itself instead
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # default verbosity: annotate the name with a compatibility
            # marker derived from the extension's `testedwith` declaration
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        # the remaining fields are only rendered with --verbose, but are
        # always present in machine-readable (templated) output
        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1390 1396
1391 1397
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset compilation pipeline; --show-stage can print the tree
    # after any of these stages
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, dumping the tree after each requested stage; the
    # stage header is omitted for the implicit --verbose 'parsed' display
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names to run the matcher against: all
    # revisions with --all-files, otherwise the selected revision (plus the
    # working directory when targeting it)
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1487 1493
1488 1494
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # generating a report and consuming/dry-running one are mutually exclusive
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1561 1567
1562 1568
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the first column: the longest variant name (at least as wide
    # as the b'format-variant' header)
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad each variant name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output shows booleans as yes/no but passes strings
            # (anything with startswith) through unchanged
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick color labels that highlight whether the repository value
        # differs from the configured value or from Mercurial's default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns only appear with --verbose
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1633 1639
1634 1640
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a probed capability as yes/no
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throw-away file; failure to create it
    # (e.g. unwritable path) leaves the answer unknown
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1657 1663
1658 1664
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # convert the hex ids given on the command line to binary nodes
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # map the user-facing compression name onto an on-disk bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1705 1711
1706 1712
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # check the file itself first, then each parent directory:
                # a file may be ignored because a containing directory is
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and which line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1755 1761
1756 1762
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full node hashes with --debug, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # derive the id column width from the first revision, defaulting to 12
    # for an empty store
    idlen = 12
    for sample in store:
        idlen = len(shortfn(store.node(sample)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1796 1802
1797 1803
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        # emit one parent->child edge per real parent; a null second
        # parent is skipped
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1816 1822
1817 1823
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # NOTE(review): shortest() is presumably called for its side effect of
    # forcing the index to be loaded/exercised — confirm before relying on it
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for name, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (name, value))
1827 1833
1828 1834
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Runs a series of sanity checks (encoding, Python, compiled modules,
    compression engines, templates, editor, username) and reports each
    result through the formatter.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # count of failed checks; also the process return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    # NOTE: ``err`` is reused by several independent checks below; it is
    # reset before each one.
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # Locate the Python standard library; under an oxidized (PyOxidizer)
    # build the stdlib lives inside the executable itself.
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext # pytype: disable=import-error

        rustext.__doc__ # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    # Same oxidized-build consideration as for the Python lib above.
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # Actually import the compiled extensions the policy promises, so
        # that a broken install is caught here rather than mid-command.
        err = None
        try:
            if cext:
                from .cext import ( # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import ( # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    # ``p`` doubles as "templates are usable" after the checks above.
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # the editor setting may include arguments; check only the executable
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    # a missing default 'vi' is only a warning; a missing configured
    # editor is a problem
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2127 2133
2128 2134
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    nodes = [bin(s) for s in ids]
    # one boolean per queried node, in input order
    answers = peer.known(nodes)
    digits = [b"1" if known else b"0" for known in answers]
    ui.write(b"%s\n" % b"".join(digits))
2142 2148
2143 2149
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias kept only so old completion scripts keep working; the
    # actual implementation is debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2149 2155
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # force-free mode: remove the lock files directly, no questions asked
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # set mode: acquire the requested locks and hold them until
    # interrupted (or acknowledged interactively)
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True: # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # always release whatever we managed to acquire
        release(*locks)

    # report mode (no options given): describe the current lock state
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Try to take the lock non-blocking; success means it was free
        # (and reaping stale locks as a side effect makes the report more
        # accurate). Failure or a leftover lock file means it is held.
        # Returns 1 if the lock is held, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # the lock file vanished between the check and the stat:
                # treat it as free; any other error is unexpected
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2273 2279
2274 2280
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # fetch the fulltext cache off the manifest storage; abort with a
        # clear message when the active revlog implementation has none
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read() # stores revision in cache too
        return

    # no option given: dump the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2348 2354
2349 2355
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    # default template mirrors the structured data filled in below
    if not opts[b'template']:
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged (local/other), with optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records; the record layout depends on the state type
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras for files that are not themselves in the merge state
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2457 2463
2458 2464
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # branch names are collected separately below so that only open
    # branches are offered as completions
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        for candidate in candidates:
            if candidate.startswith(prefix):
                matches.add(candidate)
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2481 2487
2482 2488
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # serialize a fresh nodemap from the in-memory index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # emit the persisted nodemap bytes as-is
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # validate the persisted nodemap against the current index
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # print the docket (metadata header) of the persisted nodemap
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2544 2550
2545 2551
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # parse a full hex node id into binary, rejecting anything else
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # deletion mode: remove markers by index
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # creation mode: a precursor (and optional successors) were given
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally filtered by --rev
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2695 2701
2696 2702
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # resolve the requested revision (working directory parent by default)
    revctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copymap = revctx.p1copies()
    for destination, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2709 2715
2710 2716
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # This function used to be (mis)named debugp1copies, which rebound the
    # module-level debugp1copies attribute to this p2 implementation (the
    # command registration itself happens at decoration time and was
    # unaffected). Renaming it fixes the shadowing.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2723 2729
2724 2730
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # return (files, dirs) completions for ``path``, considering only
        # dirstate entries whose state letter is in ``acceptable``
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # paths outside the repository cannot be completed
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # dirstate paths always use '/'; translate OS separators if needed
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate state letters from the options
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2793 2799
2794 2800
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    startctx = scmutil.revsingle(repo, rev1)
    endctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(startctx, pats, opts)
    copymap = copies.pathcopies(startctx, endctx, matcher)
    for destination, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2808 2814
2809 2815
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always turned on here, but the log lines
    # only show up when --debug is in effect.
    logging_overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(logging_overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        if islocal:
            ui.write(_(b'local: %s\n') % _(b'yes'))
        else:
            ui.write(_(b'local: %s\n') % _(b'no'))
        ui.write(_(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no')))
    finally:
        # make sure the connection is torn down even if a query fails
        peer.close()
2833 2839
2834 2840
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool takes priority over everything else; mirror what a real merge
    # would do by forcing ui.forcemerge for the duration of the check
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # suppress the chatter _picktool emits during selection,
            # unless --debug asked for it
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2919 2925
2920 2926
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # listing mode: dump every key/value pair in the namespace
            for key, value in sorted(peer.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
        else:
            # update mode: compare-and-set a single key over the wire
            key, old, new = keyinfo
            with peer.commandexecutor() as executor:
                result = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(result) + b'\n')
            return not result
    finally:
        peer.close()
2956 2962
2957 2963
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors ("pvec") of two revisions

    Prints each revision's pvec, both depths, and the relation between
    the two vectors: ``=`` equal, ``>``/``<`` ancestor ordering, ``|``
    incomparable.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Default guards against `rel` being unbound (NameError at the final
    # ui.write) if none of the comparison operators below claims the pair.
    rel = b"?"
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2984 2990
2985 2991
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            manifest_only = in_manifest - in_dirstate
            dirstate_only = in_dirstate - in_manifest
            not_added = {
                f for f in dirstate_only if not dirstate.get_entry(f).added
            }
            changedfiles = manifest_only | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3033 3039
3034 3040
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3051 3057
3052 3058
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(matcher):
        fctx = ctx[abs]
        # renamed() returns (source path, source filenode) or False/None
        renamed = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % rel)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (rel, renamed[0], hex(renamed[1]))
            )
3072 3078
3073 3079
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement name per line, sorted for stable output
    for req in sorted(repo.requirements):
        ui.write(req + b"\n")
3079 3085
3080 3086
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: emit one raw line of index data per revision and
        # return without computing any of the statistics below
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # a head is a revision not (yet) seen as a parent of any other
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total] and the
    # total slot is converted into an average once counts are known
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into the [min, max, total] accumulator `l` in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full snapshot: starts a new delta chain of length 0
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # delta revision: extends its delta parent's chain
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the stored chunk identifies its compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # decimal format wide enough for the largest expected value
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # like dfmtstr, but with a trailing percentage column
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # human-readable label for a chunk-type byte
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3435 3441
3436 3442
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # only the two historical index layouts (0 and 1) are supported
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full-length node ids, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # header line first; the column set depends on format and verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3550 3556
3551 3557
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # the revset processing pipeline, in application order; each stage
    # transforms the tree produced by the previous one
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # which stage trees to print: always, or only when they changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the results
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # emit a unified-diff-style listing of the two revision sequences
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3683 3689
3684 3690
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        fd = int(opts[b'logiofd'])
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3733 3739
3734 3740
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and does not
    touch anything else. This is useful for writing repository conversion
    tools, but should be used with extreme care. For example, neither the
    working directory nor the dirstate is updated, so file status may be
    incorrect after running this command. Only use it if you are one of the
    few people that deeply understand both conversion tools and file level
    histories. If you are reading this help, you are not one of those people
    (most of them sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # rev2 defaults to the null revision when omitted
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3762 3768
3763 3769
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the positional FILE argument is really the revision.
    # BUG FIX: the error/usage paths previously reported the command name as
    # b'debugdata' (copy-paste from the debugdata command); they now report
    # b'debugsidedata' so usage errors point at the right command.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Reach through filelog-style wrappers to the underlying revlog object.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Display entries in deterministic (key-sorted) order.
        sidedata = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3791 3797
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Resolve the (host, port) pair; only TLS-capable schemes make sense here.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # BUG FIX: ssl.wrap_socket() was deprecated in Python 3.7 and removed in
    # 3.12. Build the equivalent unverified context explicitly. Verification
    # is intentionally disabled (CERT_NONE, no hostname check): we only need
    # the peer's raw certificate to hand to the Windows chain builder.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # DER-encoded form is what the win32 helper expects.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3864 3870
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip backup bundle, most recently modified first, so the
    # list (and --recover's search) starts from the newest backup.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    # Helper: show the changesets of one bundle through `displayer`,
    # honoring --newest-first, --limit and --no-merges.
    def display(other, chlist, displayer):
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # Bundle references a parent revision we no longer have.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming-style chatter while probing the bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle that contains the wanted node,
                # then stop scanning (break below).
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: header is the bundle's mtime; verbose mode
                # also prints the bundle path and uses the default template.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # Always drop the temporary bundle repo created above.
            cleanupfn()
4006 4012
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """dump the subrepository state recorded at a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    for path in sorted(ctx.substate):
        state = ctx.substate[path]
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
4019 4025
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose ui and repo in the interpreter's local namespace.
    code.interact(local={'ui': ui, 'repo': repo})
4036 4042
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so related work is reused.
    memo = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=memo
        ):
            # One indented line per successors set; empty sets print blank.
            rendered = b' '.join(short(node) for node in succsset)
            if rendered:
                ui.write(b' %s' % rendered)
            ui.write(b'\n')
4092 4098
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: only report what is already cached.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
            if not flog.hasnode(fnode):
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            # Falsy but not None: the cache slot holds garbage.
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4112 4118
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # -r only makes sense when there is a repository to resolve revs in
        # (the command is optionalrepo).
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties. An empty key
    # or the reserved key 'ui' is rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree only when alias
        # expansion actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once against the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4177 4183
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() can return None; substitute a placeholder so the output
    # line is always printable.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4193 4199
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4207 4213
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Keep the wlock-before-lock acquisition order used everywhere else.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4214 4220
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Repeated -o flags are deduplicated before being handed to the
    # upgrade machinery.
    requested_optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=requested_optimizations, backup=backup, **opts
    )
4265 4271
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # Honor ui.slash by normalizing displayed relative paths when the OS
    # separator is not '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':

        def displaypath(fn):
            return util.normpath(fn)

    else:

        def displaypath(fn):
            return fn

    # Column widths are sized to the longest absolute and relative paths.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(abspath) for abspath in items),
        max(len(repo.pathto(abspath)) for abspath in items),
    )
    for abspath in items:
        line = fmt % (
            abspath,
            displaypath(repo.pathto(abspath)),
            b'exact' if m.exact(abspath) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4293 4299
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = b''
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent node with its phase, trailing space
            # included so it abuts the reason cleanly.
            rendered = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4312 4318
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    """exercise argument passing through the wire protocol

    Opens a peer for REPO and issues the ``debugwireargs`` wire command
    twice with the given positional and option values, printing the first
    result and warning if the second differs.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; only the test options
        # (--three/--four/--five) are forwarded, and only when set.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = {}
        for k, v in opts.items():
            if v:
                args[k] = v
        args = pycompat.strkwargs(args)
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        # Always release the peer connection.
        repo.close()
4344 4350
4345 4351 def _parsewirelangblocks(fh):
4346 4352 activeaction = None
4347 4353 blocklines = []
4348 4354 lastindent = 0
4349 4355
4350 4356 for line in fh:
4351 4357 line = line.rstrip()
4352 4358 if not line:
4353 4359 continue
4354 4360
4355 4361 if line.startswith(b'#'):
4356 4362 continue
4357 4363
4358 4364 if not line.startswith(b' '):
4359 4365 # New block. Flush previous one.
4360 4366 if activeaction:
4361 4367 yield activeaction, blocklines
4362 4368
4363 4369 activeaction = line
4364 4370 blocklines = []
4365 4371 lastindent = 0
4366 4372 continue
4367 4373
4368 4374 # Else we start with an indent.
4369 4375
4370 4376 if not activeaction:
4371 4377 raise error.Abort(_(b'indented line outside of block'))
4372 4378
4373 4379 indent = len(line) - len(line.lstrip())
4374 4380
4375 4381 # If this line is indented more than the last line, concatenate it.
4376 4382 if indent > lastindent and blocklines:
4377 4383 blocklines[-1] += line.lstrip()
4378 4384 else:
4379 4385 blocklines.append(line)
4380 4386 lastindent = indent
4381 4387
4382 4388 # Flush last block.
4383 4389 if activeaction:
4384 4390 yield activeaction, blocklines
4385 4391
4386 4392
4387 4393 @command(
4388 4394 b'debugwireproto',
4389 4395 [
4390 4396 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4391 4397 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4392 4398 (
4393 4399 b'',
4394 4400 b'noreadstderr',
4395 4401 False,
4396 4402 _(b'do not read from stderr of the remote'),
4397 4403 ),
4398 4404 (
4399 4405 b'',
4400 4406 b'nologhandshake',
4401 4407 False,
4402 4408 _(b'do not log I/O related to the peer handshake'),
4403 4409 ),
4404 4410 ]
4405 4411 + cmdutil.remoteopts,
4406 4412 _(b'[PATH]'),
4407 4413 optionalrepo=True,
4408 4414 )
4409 4415 def debugwireproto(ui, repo, path=None, **opts):
4410 4416 """send wire protocol commands to a server
4411 4417
4412 4418 This command can be used to issue wire protocol commands to remote
4413 4419 peers and to debug the raw data being exchanged.
4414 4420
4415 4421 ``--localssh`` will start an SSH server against the current repository
4416 4422 and connect to that. By default, the connection will perform a handshake
4417 4423 and establish an appropriate peer instance.
4418 4424
4419 4425 ``--peer`` can be used to bypass the handshake protocol and construct a
4420 4426 peer instance using the specified class type. Valid values are ``raw``,
4421 4427 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4422 4428 don't support higher-level command actions.
4423 4429
4424 4430 ``--noreadstderr`` can be used to disable automatic reading from stderr
4425 4431 of the peer (for SSH connections only). Disabling automatic reading of
4426 4432 stderr is useful for making output more deterministic.
4427 4433
4428 4434 Commands are issued via a mini language which is specified via stdin.
4429 4435 The language consists of individual actions to perform. An action is
4430 4436 defined by a block. A block is defined as a line with no leading
4431 4437 space followed by 0 or more lines with leading space. Blocks are
4432 4438 effectively a high-level command with additional metadata.
4433 4439
4434 4440 Lines beginning with ``#`` are ignored.
4435 4441
4436 4442 The following sections denote available actions.
4437 4443
4438 4444 raw
4439 4445 ---
4440 4446
4441 4447 Send raw data to the server.
4442 4448
4443 4449 The block payload contains the raw data to send as one atomic send
4444 4450 operation. The data may not actually be delivered in a single system
4445 4451 call: it depends on the abilities of the transport being used.
4446 4452
4447 4453 Each line in the block is de-indented and concatenated. Then, that
4448 4454 value is evaluated as a Python b'' literal. This allows the use of
4449 4455 backslash escaping, etc.
4450 4456
4451 4457 raw+
4452 4458 ----
4453 4459
4454 4460 Behaves like ``raw`` except flushes output afterwards.
4455 4461
4456 4462 command <X>
4457 4463 -----------
4458 4464
4459 4465 Send a request to run a named command, whose name follows the ``command``
4460 4466 string.
4461 4467
4462 4468 Arguments to the command are defined as lines in this block. The format of
4463 4469 each line is ``<key> <value>``. e.g.::
4464 4470
4465 4471 command listkeys
4466 4472 namespace bookmarks
4467 4473
4468 4474 If the value begins with ``eval:``, it will be interpreted as a Python
4469 4475 literal expression. Otherwise values are interpreted as Python b'' literals.
4470 4476 This allows sending complex types and encoding special byte sequences via
4471 4477 backslash escaping.
4472 4478
4473 4479 The following arguments have special meaning:
4474 4480
4475 4481 ``PUSHFILE``
4476 4482 When defined, the *push* mechanism of the peer will be used instead
4477 4483 of the static request-response mechanism and the content of the
4478 4484 file specified in the value of this argument will be sent as the
4479 4485 command payload.
4480 4486
4481 4487 This can be used to submit a local bundle file to the remote.
4482 4488
4483 4489 batchbegin
4484 4490 ----------
4485 4491
4486 4492 Instruct the peer to begin a batched send.
4487 4493
4488 4494 All ``command`` blocks are queued for execution until the next
4489 4495 ``batchsubmit`` block.
4490 4496
4491 4497 batchsubmit
4492 4498 -----------
4493 4499
4494 4500 Submit previously queued ``command`` blocks as a batch request.
4495 4501
4496 4502 This action MUST be paired with a ``batchbegin`` action.
4497 4503
4498 4504 httprequest <method> <path>
4499 4505 ---------------------------
4500 4506
4501 4507 (HTTP peer only)
4502 4508
4503 4509 Send an HTTP request to the peer.
4504 4510
4505 4511 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4506 4512
4507 4513 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4508 4514 headers to add to the request. e.g. ``Accept: foo``.
4509 4515
4510 4516 The following arguments are special:
4511 4517
4512 4518 ``BODYFILE``
4513 4519 The content of the file defined as the value to this argument will be
4514 4520 transferred verbatim as the HTTP request body.
4515 4521
4516 4522 ``frame <type> <flags> <payload>``
4517 4523 Send a unified protocol frame as part of the request body.
4518 4524
4519 4525 All frames will be collected and sent as the body to the HTTP
4520 4526 request.
4521 4527
4522 4528 close
4523 4529 -----
4524 4530
4525 4531 Close the connection to the server.
4526 4532
4527 4533 flush
4528 4534 -----
4529 4535
4530 4536 Flush data written to the server.
4531 4537
4532 4538 readavailable
4533 4539 -------------
4534 4540
4535 4541 Close the write end of the connection and read all available data from
4536 4542 the server.
4537 4543
4538 4544 If the connection to the server encompasses multiple pipes, we poll both
4539 4545 pipes and read available data.
4540 4546
4541 4547 readline
4542 4548 --------
4543 4549
4544 4550 Read a line of output from the server. If there are multiple output
4545 4551 pipes, reads only the main pipe.
4546 4552
4547 4553 ereadline
4548 4554 ---------
4549 4555
4550 4556 Like ``readline``, but read from the stderr pipe, if available.
4551 4557
4552 4558 read <X>
4553 4559 --------
4554 4560
4555 4561 ``read()`` N bytes from the server's main output pipe.
4556 4562
4557 4563 eread <X>
4558 4564 ---------
4559 4565
4560 4566 ``read()`` N bytes from the server's stderr pipe, if available.
4561 4567
4562 4568 Specifying Unified Frame-Based Protocol Frames
4563 4569 ----------------------------------------------
4564 4570
4565 4571 It is possible to emit a *Unified Frame-Based Protocol* by using special
4566 4572 syntax.
4567 4573
4568 4574 A frame is composed as a type, flags, and payload. These can be parsed
4569 4575 from a string of the form:
4570 4576
4571 4577 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4572 4578
4573 4579 ``request-id`` and ``stream-id`` are integers defining the request and
4574 4580 stream identifiers.
4575 4581
4576 4582 ``type`` can be an integer value for the frame type or the string name
4577 4583 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4578 4584 ``command-name``.
4579 4585
4580 4586 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4581 4587 components. Each component (and there can be just one) can be an integer
4582 4588 or a flag name for stream flags or frame flags, respectively. Values are
4583 4589 resolved to integers and then bitwise OR'd together.
4584 4590
4585 4591 ``payload`` represents the raw frame payload. If it begins with
4586 4592 ``cbor:``, the following string is evaluated as Python code and the
4587 4593 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4588 4594 as a Python byte string literal.
4589 4595 """
4590 4596 opts = pycompat.byteskwargs(opts)
4591 4597
4592 4598 if opts[b'localssh'] and not repo:
4593 4599 raise error.Abort(_(b'--localssh requires a repository'))
4594 4600
4595 4601 if opts[b'peer'] and opts[b'peer'] not in (
4596 4602 b'raw',
4597 4603 b'ssh1',
4598 4604 ):
4599 4605 raise error.Abort(
4600 4606 _(b'invalid value for --peer'),
4601 4607 hint=_(b'valid values are "raw" and "ssh1"'),
4602 4608 )
4603 4609
4604 4610 if path and opts[b'localssh']:
4605 4611 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4606 4612
4607 4613 if ui.interactive():
4608 4614 ui.write(_(b'(waiting for commands on stdin)\n'))
4609 4615
4610 4616 blocks = list(_parsewirelangblocks(ui.fin))
4611 4617
4612 4618 proc = None
4613 4619 stdin = None
4614 4620 stdout = None
4615 4621 stderr = None
4616 4622 opener = None
4617 4623
4618 4624 if opts[b'localssh']:
4619 4625 # We start the SSH server in its own process so there is process
4620 4626 # separation. This prevents a whole class of potential bugs around
4621 4627 # shared state from interfering with server operation.
4622 4628 args = procutil.hgcmd() + [
4623 4629 b'-R',
4624 4630 repo.root,
4625 4631 b'debugserve',
4626 4632 b'--sshstdio',
4627 4633 ]
4628 4634 proc = subprocess.Popen(
4629 4635 pycompat.rapply(procutil.tonativestr, args),
4630 4636 stdin=subprocess.PIPE,
4631 4637 stdout=subprocess.PIPE,
4632 4638 stderr=subprocess.PIPE,
4633 4639 bufsize=0,
4634 4640 )
4635 4641
4636 4642 stdin = proc.stdin
4637 4643 stdout = proc.stdout
4638 4644 stderr = proc.stderr
4639 4645
4640 4646 # We turn the pipes into observers so we can log I/O.
4641 4647 if ui.verbose or opts[b'peer'] == b'raw':
4642 4648 stdin = util.makeloggingfileobject(
4643 4649 ui, proc.stdin, b'i', logdata=True
4644 4650 )
4645 4651 stdout = util.makeloggingfileobject(
4646 4652 ui, proc.stdout, b'o', logdata=True
4647 4653 )
4648 4654 stderr = util.makeloggingfileobject(
4649 4655 ui, proc.stderr, b'e', logdata=True
4650 4656 )
4651 4657
4652 4658 # --localssh also implies the peer connection settings.
4653 4659
4654 4660 url = b'ssh://localserver'
4655 4661 autoreadstderr = not opts[b'noreadstderr']
4656 4662
4657 4663 if opts[b'peer'] == b'ssh1':
4658 4664 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4659 4665 peer = sshpeer.sshv1peer(
4660 4666 ui,
4661 4667 url,
4662 4668 proc,
4663 4669 stdin,
4664 4670 stdout,
4665 4671 stderr,
4666 4672 None,
4667 4673 autoreadstderr=autoreadstderr,
4668 4674 )
4669 4675 elif opts[b'peer'] == b'raw':
4670 4676 ui.write(_(b'using raw connection to peer\n'))
4671 4677 peer = None
4672 4678 else:
4673 4679 ui.write(_(b'creating ssh peer from handshake results\n'))
4674 4680 peer = sshpeer.makepeer(
4675 4681 ui,
4676 4682 url,
4677 4683 proc,
4678 4684 stdin,
4679 4685 stdout,
4680 4686 stderr,
4681 4687 autoreadstderr=autoreadstderr,
4682 4688 )
4683 4689
4684 4690 elif path:
4685 4691 # We bypass hg.peer() so we can proxy the sockets.
4686 4692 # TODO consider not doing this because we skip
4687 4693 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4688 4694 u = urlutil.url(path)
4689 4695 if u.scheme != b'http':
4690 4696 raise error.Abort(_(b'only http:// paths are currently supported'))
4691 4697
4692 4698 url, authinfo = u.authinfo()
4693 4699 openerargs = {
4694 4700 'useragent': b'Mercurial debugwireproto',
4695 4701 }
4696 4702
4697 4703 # Turn pipes/sockets into observers so we can log I/O.
4698 4704 if ui.verbose:
4699 4705 openerargs.update(
4700 4706 {
4701 4707 'loggingfh': ui,
4702 4708 'loggingname': b's',
4703 4709 'loggingopts': {
4704 4710 'logdata': True,
4705 4711 'logdataapis': False,
4706 4712 },
4707 4713 }
4708 4714 )
4709 4715
4710 4716 if ui.debugflag:
4711 4717 openerargs['loggingopts']['logdataapis'] = True
4712 4718
4713 4719 # Don't send default headers when in raw mode. This allows us to
4714 4720 # bypass most of the behavior of our URL handling code so we can
4715 4721 # have near complete control over what's sent on the wire.
4716 4722 if opts[b'peer'] == b'raw':
4717 4723 openerargs['sendaccept'] = False
4718 4724
4719 4725 opener = urlmod.opener(ui, authinfo, **openerargs)
4720 4726
4721 4727 if opts[b'peer'] == b'raw':
4722 4728 ui.write(_(b'using raw connection to peer\n'))
4723 4729 peer = None
4724 4730 elif opts[b'peer']:
4725 4731 raise error.Abort(
4726 4732 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4727 4733 )
4728 4734 else:
4729 4735 peer = httppeer.makepeer(ui, path, opener=opener)
4730 4736
4731 4737 # We /could/ populate stdin/stdout with sock.makefile()...
4732 4738 else:
4733 4739 raise error.Abort(_(b'unsupported connection configuration'))
4734 4740
4735 4741 batchedcommands = None
4736 4742
4737 4743 # Now perform actions based on the parsed wire language instructions.
4738 4744 for action, lines in blocks:
4739 4745 if action in (b'raw', b'raw+'):
4740 4746 if not stdin:
4741 4747 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4742 4748
4743 4749 # Concatenate the data together.
4744 4750 data = b''.join(l.lstrip() for l in lines)
4745 4751 data = stringutil.unescapestr(data)
4746 4752 stdin.write(data)
4747 4753
4748 4754 if action == b'raw+':
4749 4755 stdin.flush()
4750 4756 elif action == b'flush':
4751 4757 if not stdin:
4752 4758 raise error.Abort(_(b'cannot call flush on this peer'))
4753 4759 stdin.flush()
4754 4760 elif action.startswith(b'command'):
4755 4761 if not peer:
4756 4762 raise error.Abort(
4757 4763 _(
4758 4764 b'cannot send commands unless peer instance '
4759 4765 b'is available'
4760 4766 )
4761 4767 )
4762 4768
4763 4769 command = action.split(b' ', 1)[1]
4764 4770
4765 4771 args = {}
4766 4772 for line in lines:
4767 4773 # We need to allow empty values.
4768 4774 fields = line.lstrip().split(b' ', 1)
4769 4775 if len(fields) == 1:
4770 4776 key = fields[0]
4771 4777 value = b''
4772 4778 else:
4773 4779 key, value = fields
4774 4780
4775 4781 if value.startswith(b'eval:'):
4776 4782 value = stringutil.evalpythonliteral(value[5:])
4777 4783 else:
4778 4784 value = stringutil.unescapestr(value)
4779 4785
4780 4786 args[key] = value
4781 4787
4782 4788 if batchedcommands is not None:
4783 4789 batchedcommands.append((command, args))
4784 4790 continue
4785 4791
4786 4792 ui.status(_(b'sending %s command\n') % command)
4787 4793
4788 4794 if b'PUSHFILE' in args:
4789 4795 with open(args[b'PUSHFILE'], 'rb') as fh:
4790 4796 del args[b'PUSHFILE']
4791 4797 res, output = peer._callpush(
4792 4798 command, fh, **pycompat.strkwargs(args)
4793 4799 )
4794 4800 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4795 4801 ui.status(
4796 4802 _(b'remote output: %s\n') % stringutil.escapestr(output)
4797 4803 )
4798 4804 else:
4799 4805 with peer.commandexecutor() as e:
4800 4806 res = e.callcommand(command, args).result()
4801 4807
4802 4808 ui.status(
4803 4809 _(b'response: %s\n')
4804 4810 % stringutil.pprint(res, bprefix=True, indent=2)
4805 4811 )
4806 4812
4807 4813 elif action == b'batchbegin':
4808 4814 if batchedcommands is not None:
4809 4815 raise error.Abort(_(b'nested batchbegin not allowed'))
4810 4816
4811 4817 batchedcommands = []
4812 4818 elif action == b'batchsubmit':
4813 4819 # There is a batching API we could go through. But it would be
4814 4820 # difficult to normalize requests into function calls. It is easier
4815 4821 # to bypass this layer and normalize to commands + args.
4816 4822 ui.status(
4817 4823 _(b'sending batch with %d sub-commands\n')
4818 4824 % len(batchedcommands)
4819 4825 )
4820 4826 assert peer is not None
4821 4827 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4822 4828 ui.status(
4823 4829 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4824 4830 )
4825 4831
4826 4832 batchedcommands = None
4827 4833
4828 4834 elif action.startswith(b'httprequest '):
4829 4835 if not opener:
4830 4836 raise error.Abort(
4831 4837 _(b'cannot use httprequest without an HTTP peer')
4832 4838 )
4833 4839
4834 4840 request = action.split(b' ', 2)
4835 4841 if len(request) != 3:
4836 4842 raise error.Abort(
4837 4843 _(
4838 4844 b'invalid httprequest: expected format is '
4839 4845 b'"httprequest <method> <path>'
4840 4846 )
4841 4847 )
4842 4848
4843 4849 method, httppath = request[1:]
4844 4850 headers = {}
4845 4851 body = None
4846 4852 frames = []
4847 4853 for line in lines:
4848 4854 line = line.lstrip()
4849 4855 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4850 4856 if m:
4851 4857 # Headers need to use native strings.
4852 4858 key = pycompat.strurl(m.group(1))
4853 4859 value = pycompat.strurl(m.group(2))
4854 4860 headers[key] = value
4855 4861 continue
4856 4862
4857 4863 if line.startswith(b'BODYFILE '):
4858 4864 with open(line.split(b' ', 1), b'rb') as fh:
4859 4865 body = fh.read()
4860 4866 elif line.startswith(b'frame '):
4861 4867 frame = wireprotoframing.makeframefromhumanstring(
4862 4868 line[len(b'frame ') :]
4863 4869 )
4864 4870
4865 4871 frames.append(frame)
4866 4872 else:
4867 4873 raise error.Abort(
4868 4874 _(b'unknown argument to httprequest: %s') % line
4869 4875 )
4870 4876
4871 4877 url = path + httppath
4872 4878
4873 4879 if frames:
4874 4880 body = b''.join(bytes(f) for f in frames)
4875 4881
4876 4882 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4877 4883
4878 4884 # urllib.Request insists on using has_data() as a proxy for
4879 4885 # determining the request method. Override that to use our
4880 4886 # explicitly requested method.
4881 4887 req.get_method = lambda: pycompat.sysstr(method)
4882 4888
4883 4889 try:
4884 4890 res = opener.open(req)
4885 4891 body = res.read()
4886 4892 except util.urlerr.urlerror as e:
4887 4893 # read() method must be called, but only exists in Python 2
4888 4894 getattr(e, 'read', lambda: None)()
4889 4895 continue
4890 4896
4891 4897 ct = res.headers.get('Content-Type')
4892 4898 if ct == 'application/mercurial-cbor':
4893 4899 ui.write(
4894 4900 _(b'cbor> %s\n')
4895 4901 % stringutil.pprint(
4896 4902 cborutil.decodeall(body), bprefix=True, indent=2
4897 4903 )
4898 4904 )
4899 4905
        elif action == b'close':
            # Explicitly shut down the peer connection.
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            # Drain stdout/stderr to EOF; closing stdin signals the server
            # to finish so the reads can terminate.
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            # NOTE(review): stdin is used without a guard here while
            # stdout/stderr are checked above; for ssh peers all three are
            # set together, but confirm this action cannot be reached with
            # stdin=None (e.g. via an HTTP peer).
            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            # Consume a single line from the server's stdout (discarded;
            # observers above log the I/O when verbose).
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            # Same as readline, but for the server's stderr.
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            # Consume exactly <count> bytes from stdout.
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            # Consume exactly <count> bytes from stderr.
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)
4933 4939
4934 4940 if batchedcommands is not None:
4935 4941 raise error.Abort(_(b'unclosed "batchbegin" request'))
4936 4942
4937 4943 if peer:
4938 4944 peer.close()
4939 4945
4940 4946 if proc:
4941 4947 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now