##// END OF EJS Templates
debugdiscovery: fix a typo in the help...
marmoute -
r49810:db960032 default
parent child Browse files
Show More
@@ -1,4883 +1,4883
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 repoview,
73 73 requirements,
74 74 revlog,
75 75 revset,
76 76 revsetlang,
77 77 scmutil,
78 78 setdiscovery,
79 79 simplemerge,
80 80 sshpeer,
81 81 sslutil,
82 82 streamclone,
83 83 strip,
84 84 tags as tagsmod,
85 85 templater,
86 86 treediscovery,
87 87 upgrade,
88 88 url as urlmod,
89 89 util,
90 90 vfs as vfsmod,
91 91 wireprotoframing,
92 92 wireprotoserver,
93 93 )
94 94 from .interfaces import repository
95 95 from .utils import (
96 96 cborutil,
97 97 compression,
98 98 dateutil,
99 99 procutil,
100 100 stringutil,
101 101 urlutil,
102 102 )
103 103
104 104 from .revlogutils import (
105 105 deltas as deltautil,
106 106 nodemap,
107 107 rewrite,
108 108 sidedata,
109 109 )
110 110
111 111 release = lockmod.release
112 112
113 113 table = {}
114 114 table.update(strip.command._table)
115 115 command = registrar.command(table)
116 116
117 117
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs not in (2, 3):
        raise error.Abort(_(b'either two or three arguments required'))
    if nargs == 3:
        # an explicit index file was given: open it as a standalone revlog
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    else:
        # no index file: fall back to the changelog of the local repository
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    ancestor = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancestor), hex(ancestor)))
137 137
138 138
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout this file; use a single bytes constant
    # so the write and the cleanup below cannot drift apart.
    fname = b'eicar-test-file.com'
    with repo.cachevfs.open(fname, b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(fname))
154 154
155 155
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # open the bundle, parse its header, then replay it onto the local repo
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
162 162
163 163
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
          otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first pass over the parsed DAG, used
    # to size the progress bar and the mergeable-file contents below)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    # ":tag" entries collected during the walk, written to .hg/localtags last
    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        # second pass: actually create one commit per 'n' event
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                # p2 is resolved lazily; only merge nodes have a second parent
                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the "mf" file contents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # stamp this revision's line so every rev changes the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over p2's "nf*" files so the merge keeps them
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file contents from filecontent
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for the preceding node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # switch the named branch for subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
349 349
350 350
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """print the contents of a changegroup unbundler ``gen``

    With ``all`` set, each delta of the changelog, manifest and filelog
    sections is listed with its parents, cset and delta base; otherwise only
    the changelog node hashes are printed.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # dump every delta of the current section, one line per revision
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # one filelog section per file, until the empty-dict terminator
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        # terse mode: only the changelog node hashes
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
390 390
391 391
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and obsolescence markers contained in bundle part ``part``"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report markers encoded with an unknown version, but do not abort
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        # one formatter item per marker, in sorted (stable) order
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
414 414
415 415
def _debugphaseheads(ui, data, indent=0):
    """decode a binary phase-heads blob and print one "<node> <phase>" line per head"""
    pad = b' ' * indent
    heads_by_phase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in heads_by_phase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
424 424
425 425
def _quasirepr(thing):
    """return a repr()-like bytes form, with mappings rendered in sorted key order"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
432 432
433 433
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # optional filtering on the part types requested by the caller
        if wanted and part.type not in wanted:
            continue
        header = b'%s -- %s (mandatory: %r)\n' % (
            part.type,
            _quasirepr(part.params),
            part.mandatory,
        )
        ui.write((header))
        if part.type == b'changegroup':
            # the unbundler is instantiated even in quiet mode, matching the
            # historical behavior of this command
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers' and not ui.quiet:
            _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads' and not ui.quiet:
            _debugphaseheads(ui, part, indent=4)
456 456
457 457
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec; do not parse the payload
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
480 480
481 481
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # query capabilities before printing anything, so a failing peer
        # produces no partial output
        caps = sorted(peer.capabilities())
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in caps:
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        peer.close()
501 501
502 502
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the pre-computed information from the changelog sidedata
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is None:
        # nothing stored and nothing computed: print nothing
        return

    # categories are tested in priority order; the first match wins
    categories = (
        (files.added, b"added"),
        (files.removed, b"removed"),
        (files.merged, b"merged"),
        (files.salvaged, b"salvaged"),
    )
    for fname in sorted(files.touched):
        for members, label in categories:
            if fname in members:
                action = label
                break
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if fname in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[fname]
        elif fname in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[fname]

        data = (action, copy_parent, fname, copy_source)
        ui.write(b"%-8s %2s: %s, %s;\n" % data)
552 552
553 553
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    m1 = repo[p1].manifest()
    m2 = repo[p2].manifest()
    # each verify() entry is (format-string, arg, arg, ...)
    problems = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        problems += 1
    if problems:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
567 567
568 568
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if not opts.get('style'):
        return _debugdisplaycolor(ui)
    return _debugdisplaystyle(ui)
581 581
582 582
def _debugdisplaycolor(ui):
    """print every color label known to the current mode, rendered in itself"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for name, value in ui.configitems(b'color'):
            if name.startswith(b'color.'):
                ui._styles[name] = name[6:]
            elif name.startswith(b'terminfo.'):
                ui._styles[name] = name[9:]
    ui.write(_(b'available colors:\n'))

    def sortkey(item):
        # sort labels containing '_' after the others so that the
        # '_background' entries are grouped at the end
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
599 599
600 600
def _debugdisplaystyle(ui):
    """print each configured style and the effects it expands to, aligned"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            # pad so every effects column starts at the same offset
            ui.write(b' ' * (max(0, width - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
614 614
615 615
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # name the local `reqs` so the file-level `requirements` module import
    # is not shadowed inside this function
    reqs, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
637 637
638 638
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # a standalone revlog index file was given: emit its DAG
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield 'n' (node) events; add an 'l' (label) event for each
            # revision explicitly listed on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map revision number -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # walk the changelog, emitting branch annotations ('a'), node
            # events ('n') and tag labels ('l') as requested
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
708 708
709 709
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # with -c/-m/--dir the first positional argument is really the rev
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    storage = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
725 725
726 726
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    # parsed is a (timestamp, timezone-offset) pair
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
745 745
746 746
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
      (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
      of this revision
    :``extradist``: total size of revisions not part of this delta chain from
      base of delta chain to end of this revision; a measurement
      of how much extra data we need to read/seek across to read
      the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
      how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
      (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # summarize one index entry: sizes, how its delta was chosen, and
        # the full delta chain leading to it
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; e[5]/e[6] are the parent revisions
            # (per the index tuple layout used below)
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without general delta, a revision is either a full snapshot
            # (base) or a delta against the previous revision
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chains are numbered by order of first appearance of their base
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: how many hunks, how much data
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
927 927
928 928
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --dates=no; it wins if given
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) when --datesort is requested
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # -1 marks an entry whose mtime is not recorded
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # presumably the symlink flag in the recorded mode — the branch
            # replaces the octal permissions with 'lnk'
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
983 983
984 984
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # only dirstate-v2 stores an ignore-pattern hash; with v1 this command
    # prints nothing at all
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # the hash occupies the trailing hash_len bytes of the tree metadata
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
999 999
1000 1000
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            # fixed typo: "treat local has having" -> "treat local as having"
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            # fixed typo: doubled "these these"
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual (possibly local) peer
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # use a filtered view of the local repository as the "remote" side
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # restrict the local side to the requested subset
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1254 1254
1255 1255
1256 1256 _chunksize = 4 << 10
1257 1257
1258 1258
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    # Fetch through Mercurial's URL layer so proxy/auth config applies.
    src = urlmod.open(ui, url, output)

    # With --output write chunks to that file, otherwise echo them to the ui.
    if output:
        dest = open(output, b"wb", _chunksize)
    else:
        dest = ui
    try:
        # Copy fixed-size chunks until the source reports end of data.
        for chunk in iter(lambda: src.read(_chunksize), b''):
            dest.write(chunk)
    finally:
        # Only close what we opened ourselves; never close the ui.
        if output:
            dest.close()
1281 1281
1282 1282
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # Locate the extension on disk; oxidized builds have no __file__,
        # so fall back to the executable path in that case.
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # quiet/verbose: the name stands on its own line
            fm.write(b'name', b'%s\n', extname)
        else:
            # normal mode: append a compatibility marker on the same line
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # show the most recent version the extension was tested with
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        # machine-readable "bundled" flag is always emitted, the plain-text
        # line only with --verbose
        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1344 1344
1345 1345
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the processing pipeline: parse output runs through each stage in order
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # determine which intermediate trees the user asked to see
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, printing the tree after each requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        # include everything known to the working directory, even
        # unknown/ignored files, so the matcher can be probed broadly
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    # build the matcher and print every candidate it accepts
    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1441 1441
1442 1442
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report (building a report) cannot be combined with consuming one
    # or with a dry run
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # the actual detection/repair work lives in the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1515 1515
1516 1516
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: widest variant name, at least the header width
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # format string padding the variant name to the column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output renders booleans as yes/no; byte strings pass
            # through unchanged (startswith is used as a bytes-duck-typing
            # check)
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row; config/default columns only appear with --verbose
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo, config and Mercurial's
        # default can be highlighted differently
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1587 1587
1588 1588
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result the way this command reports it
        return b'yes' if flag else b'no'

    out = ui.writenoi18n
    out(b'path: %s\n' % path)
    out(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
    out(b'exec: %s\n' % yesno(util.checkexec(path)))
    out(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    out(b'symlink: %s\n' % yesno(util.checklink(path)))
    out(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probing case sensitivity needs a scratch file in the target directory;
    # if that fails we report '(unknown)'
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    out(b'case-sensitive: %s\n' % casesensitive)
1611 1611
1612 1612
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # assemble the wire-protocol arguments from the hex node ids
    args = {}
    if common:
        args['common'] = [bin(c) for c in common]
    if head:
        args['heads'] = [bin(h) for h in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **args)

    # map the user-facing compression name to an on-disk bundle type
    requested = opts.get(b'type', b'bzip2').lower()
    known_types = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = known_types.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1659 1659
1660 1660
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # first check the file itself, then walk up its parent
                # directories: a file is also ignored when any containing
                # directory matches an ignore rule
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1709 1709
1710 1710
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes with --debug, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # size the node columns from the first revision, if there is one
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1750 1750
1751 1751
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    # one edge per parent -> child pair; a null second parent is omitted
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1770 1770
1771 1771
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise the index so the collected stats reflect real usage
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1781 1781
1782 1782
1783 1783 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1784 1784 def debuginstall(ui, **opts):
1785 1785 """test Mercurial installation
1786 1786
1787 1787 Returns 0 on success.
1788 1788 """
1789 1789 opts = pycompat.byteskwargs(opts)
1790 1790
1791 1791 problems = 0
1792 1792
1793 1793 fm = ui.formatter(b'debuginstall', opts)
1794 1794 fm.startitem()
1795 1795
1796 1796 # encoding might be unknown or wrong. don't translate these messages.
1797 1797 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1798 1798 err = None
1799 1799 try:
1800 1800 codecs.lookup(pycompat.sysstr(encoding.encoding))
1801 1801 except LookupError as inst:
1802 1802 err = stringutil.forcebytestr(inst)
1803 1803 problems += 1
1804 1804 fm.condwrite(
1805 1805 err,
1806 1806 b'encodingerror',
1807 1807 b" %s\n (check that your locale is properly set)\n",
1808 1808 err,
1809 1809 )
1810 1810
1811 1811 # Python
1812 1812 pythonlib = None
1813 1813 if util.safehasattr(os, '__file__'):
1814 1814 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1815 1815 elif getattr(sys, 'oxidized', False):
1816 1816 pythonlib = pycompat.sysexecutable
1817 1817
1818 1818 fm.write(
1819 1819 b'pythonexe',
1820 1820 _(b"checking Python executable (%s)\n"),
1821 1821 pycompat.sysexecutable or _(b"unknown"),
1822 1822 )
1823 1823 fm.write(
1824 1824 b'pythonimplementation',
1825 1825 _(b"checking Python implementation (%s)\n"),
1826 1826 pycompat.sysbytes(platform.python_implementation()),
1827 1827 )
1828 1828 fm.write(
1829 1829 b'pythonver',
1830 1830 _(b"checking Python version (%s)\n"),
1831 1831 (b"%d.%d.%d" % sys.version_info[:3]),
1832 1832 )
1833 1833 fm.write(
1834 1834 b'pythonlib',
1835 1835 _(b"checking Python lib (%s)...\n"),
1836 1836 pythonlib or _(b"unknown"),
1837 1837 )
1838 1838
1839 1839 try:
1840 1840 from . import rustext # pytype: disable=import-error
1841 1841
1842 1842 rustext.__doc__ # trigger lazy import
1843 1843 except ImportError:
1844 1844 rustext = None
1845 1845
1846 1846 security = set(sslutil.supportedprotocols)
1847 1847 if sslutil.hassni:
1848 1848 security.add(b'sni')
1849 1849
1850 1850 fm.write(
1851 1851 b'pythonsecurity',
1852 1852 _(b"checking Python security support (%s)\n"),
1853 1853 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1854 1854 )
1855 1855
1856 1856 # These are warnings, not errors. So don't increment problem count. This
1857 1857 # may change in the future.
1858 1858 if b'tls1.2' not in security:
1859 1859 fm.plain(
1860 1860 _(
1861 1861 b' TLS 1.2 not supported by Python install; '
1862 1862 b'network connections lack modern security\n'
1863 1863 )
1864 1864 )
1865 1865 if b'sni' not in security:
1866 1866 fm.plain(
1867 1867 _(
1868 1868 b' SNI not supported by Python install; may have '
1869 1869 b'connectivity issues with some servers\n'
1870 1870 )
1871 1871 )
1872 1872
1873 1873 fm.plain(
1874 1874 _(
1875 1875 b"checking Rust extensions (%s)\n"
1876 1876 % (b'missing' if rustext is None else b'installed')
1877 1877 ),
1878 1878 )
1879 1879
1880 1880 # TODO print CA cert info
1881 1881
1882 1882 # hg version
1883 1883 hgver = util.version()
1884 1884 fm.write(
1885 1885 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1886 1886 )
1887 1887 fm.write(
1888 1888 b'hgverextra',
1889 1889 _(b"checking Mercurial custom build (%s)\n"),
1890 1890 b'+'.join(hgver.split(b'+')[1:]),
1891 1891 )
1892 1892
1893 1893 # compiled modules
1894 1894 hgmodules = None
1895 1895 if util.safehasattr(sys.modules[__name__], '__file__'):
1896 1896 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1897 1897 elif getattr(sys, 'oxidized', False):
1898 1898 hgmodules = pycompat.sysexecutable
1899 1899
1900 1900 fm.write(
1901 1901 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1902 1902 )
1903 1903 fm.write(
1904 1904 b'hgmodules',
1905 1905 _(b"checking installed modules (%s)...\n"),
1906 1906 hgmodules or _(b"unknown"),
1907 1907 )
1908 1908
1909 1909 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1910 1910 rustext = rustandc # for now, that's the only case
1911 1911 cext = policy.policy in (b'c', b'allow') or rustandc
1912 1912 nopure = cext or rustext
1913 1913 if nopure:
1914 1914 err = None
1915 1915 try:
1916 1916 if cext:
1917 1917 from .cext import ( # pytype: disable=import-error
1918 1918 base85,
1919 1919 bdiff,
1920 1920 mpatch,
1921 1921 osutil,
1922 1922 )
1923 1923
1924 1924 # quiet pyflakes
1925 1925 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1926 1926 if rustext:
1927 1927 from .rustext import ( # pytype: disable=import-error
1928 1928 ancestor,
1929 1929 dirstate,
1930 1930 )
1931 1931
1932 1932 dir(ancestor), dir(dirstate) # quiet pyflakes
1933 1933 except Exception as inst:
1934 1934 err = stringutil.forcebytestr(inst)
1935 1935 problems += 1
1936 1936 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1937 1937
1938 1938 compengines = util.compengines._engines.values()
1939 1939 fm.write(
1940 1940 b'compengines',
1941 1941 _(b'checking registered compression engines (%s)\n'),
1942 1942 fm.formatlist(
1943 1943 sorted(e.name() for e in compengines),
1944 1944 name=b'compengine',
1945 1945 fmt=b'%s',
1946 1946 sep=b', ',
1947 1947 ),
1948 1948 )
1949 1949 fm.write(
1950 1950 b'compenginesavail',
1951 1951 _(b'checking available compression engines (%s)\n'),
1952 1952 fm.formatlist(
1953 1953 sorted(e.name() for e in compengines if e.available()),
1954 1954 name=b'compengine',
1955 1955 fmt=b'%s',
1956 1956 sep=b', ',
1957 1957 ),
1958 1958 )
1959 1959 wirecompengines = compression.compengines.supportedwireengines(
1960 1960 compression.SERVERROLE
1961 1961 )
1962 1962 fm.write(
1963 1963 b'compenginesserver',
1964 1964 _(
1965 1965 b'checking available compression engines '
1966 1966 b'for wire protocol (%s)\n'
1967 1967 ),
1968 1968 fm.formatlist(
1969 1969 [e.name() for e in wirecompengines if e.wireprotosupport()],
1970 1970 name=b'compengine',
1971 1971 fmt=b'%s',
1972 1972 sep=b', ',
1973 1973 ),
1974 1974 )
1975 1975 re2 = b'missing'
1976 1976 if util._re2:
1977 1977 re2 = b'available'
1978 1978 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1979 1979 fm.data(re2=bool(util._re2))
1980 1980
1981 1981 # templates
1982 1982 p = templater.templatedir()
1983 1983 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1984 1984 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1985 1985 if p:
1986 1986 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1987 1987 if m:
1988 1988 # template found, check if it is working
1989 1989 err = None
1990 1990 try:
1991 1991 templater.templater.frommapfile(m)
1992 1992 except Exception as inst:
1993 1993 err = stringutil.forcebytestr(inst)
1994 1994 p = None
1995 1995 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1996 1996 else:
1997 1997 p = None
1998 1998 fm.condwrite(
1999 1999 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2000 2000 )
2001 2001 fm.condwrite(
2002 2002 not m,
2003 2003 b'defaulttemplatenotfound',
2004 2004 _(b" template '%s' not found\n"),
2005 2005 b"default",
2006 2006 )
2007 2007 if not p:
2008 2008 problems += 1
2009 2009 fm.condwrite(
2010 2010 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2011 2011 )
2012 2012
2013 2013 # editor
2014 2014 editor = ui.geteditor()
2015 2015 editor = util.expandpath(editor)
2016 2016 editorbin = procutil.shellsplit(editor)[0]
2017 2017 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2018 2018 cmdpath = procutil.findexe(editorbin)
2019 2019 fm.condwrite(
2020 2020 not cmdpath and editor == b'vi',
2021 2021 b'vinotfound',
2022 2022 _(
2023 2023 b" No commit editor set and can't find %s in PATH\n"
2024 2024 b" (specify a commit editor in your configuration"
2025 2025 b" file)\n"
2026 2026 ),
2027 2027 not cmdpath and editor == b'vi' and editorbin,
2028 2028 )
2029 2029 fm.condwrite(
2030 2030 not cmdpath and editor != b'vi',
2031 2031 b'editornotfound',
2032 2032 _(
2033 2033 b" Can't find editor '%s' in PATH\n"
2034 2034 b" (specify a commit editor in your configuration"
2035 2035 b" file)\n"
2036 2036 ),
2037 2037 not cmdpath and editorbin,
2038 2038 )
2039 2039 if not cmdpath and editor != b'vi':
2040 2040 problems += 1
2041 2041
2042 2042 # check username
2043 2043 username = None
2044 2044 err = None
2045 2045 try:
2046 2046 username = ui.username()
2047 2047 except error.Abort as e:
2048 2048 err = e.message
2049 2049 problems += 1
2050 2050
2051 2051 fm.condwrite(
2052 2052 username, b'username', _(b"checking username (%s)\n"), username
2053 2053 )
2054 2054 fm.condwrite(
2055 2055 err,
2056 2056 b'usernameerror',
2057 2057 _(
2058 2058 b"checking username...\n %s\n"
2059 2059 b" (specify a username in your configuration file)\n"
2060 2060 ),
2061 2061 err,
2062 2062 )
2063 2063
2064 2064 for name, mod in extensions.extensions():
2065 2065 handler = getattr(mod, 'debuginstall', None)
2066 2066 if handler is not None:
2067 2067 problems += handler(ui, fm)
2068 2068
2069 2069 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2070 2070 if not problems:
2071 2071 fm.data(problems=problems)
2072 2072 fm.condwrite(
2073 2073 problems,
2074 2074 b'problems',
2075 2075 _(b"%d problems detected, please check your install!\n"),
2076 2076 problems,
2077 2077 )
2078 2078 fm.end()
2079 2079
2080 2080 return problems
2081 2081
2082 2082
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Query all nodes in one round-trip; the peer answers with one
    # boolean per requested node, in the same order.
    nodes = [bin(s) for s in ids]
    answers = peer.known(nodes)
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in answers))
2096 2096
2097 2097
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only so ancient shell-completion scripts that call
    # 'hg debuglabelcomplete' keep working; the actual implementation
    # lives in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2102 2102
2103 2103
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: blindly delete the lock file(s) without checking
    # whether a live process still holds them (hence "DANGEROUS").
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire non-blockingly, hold until the
    # user answers the prompt (or the process is interrupted), then
    # release in the finally clause.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Default mode: report the state of both locks.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the state of one lock file; return 1 if held, 0 if free."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could take the lock ourselves, so nobody held it; drop it
            # again immediately.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    # Only show the host when the lock is held remotely.
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # A vanished lock file just means the lock is free.
                if e.errno != errno.ENOENT:
                    raise

            ui.writenoi18n(b"%-6s free\n" % (name + b":"))
            return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2215 2215
2216 2216
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache is an implementation detail of the default
        # revlog-backed manifest storage; other storage backends may not
        # have one, in which case we abort with an explanation.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # No option given: display the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2290 2290
2291 2291
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # read both on-disk record formats so we can report which one
        # would actually be used
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable rendering; users can override with -T
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged: local (p1) and other (p2)
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records; the fields present depend on the record type
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2399 2399
2400 2400
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather every known name except branches, which get special
    # handling below: historically only *open* branches were listed.
    candidates = set()
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(branch)

    # With no argument, the empty prefix matches everything.
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2423 2423
2424 2424
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # Every mode works on the unfiltered changelog: the persistent
    # nodemap covers all revisions, hidden ones included. Hoisted here
    # instead of being recomputed in each branch.
    cl = repo.unfiltered().changelog
    if opts['dump_new']:
        # Prefer the index's own serializer (rust/c implementations);
        # fall back to the pure-python one.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            # guard against an empty data file: the original expression
            # would raise ZeroDivisionError when data_length is 0
            if docket.data_length:
                unused_perc = docket.data_unused * 100.0 / docket.data_length
            else:
                unused_perc = 0.0
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2486 2486
2487 2487
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        """Parse a full hex node id, aborting on malformed input."""
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by index
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        # grammar fix: the message used to read
                        # "cannot used --record-parents"
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # --index needs positions relative to the *full* marker list,
            # so iterate everything but only display the relevant subset.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2637 2637
2638 2638
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # one "source -> destination" line per copy recorded against p1
    copymap = ctx.p1copies()
    for dest in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dest], dest))
2651 2651
2652 2652
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # The function used to be (mis)named debugp1copies due to a copy/paste
    # error, shadowing the real debugp1copies at module level. The command
    # registration name (b'debugp2copies') was always correct; renaming the
    # function removes the shadowing.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2665 2665
2666 2666
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec. 'acceptable' is
        # the set of dirstate state characters to match (e.g. b'nmar').
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # specs outside the repository cannot match anything
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # make the spec relative to the repo root, like dirstate paths
        spec = spec[len(rootdir) :]
        # dirstate always stores b'/'-separated paths; translate when the
        # OS separator differs (e.g. Windows)
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, only complete up to the next separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the filter options
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # with no filter option given, accept every state
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2735 2735
2736 2736
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    # print one "source -> destination" line per copy, ordered by destination
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for dest, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2750 2750
2751 2751
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Force peer-request logging on; the log lines only show up when the
    # user also passes --debug.
    logging_override = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()
        yes = _(b'yes')
        no = _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (yes if islocal else no))
        ui.write(_(b'pushable: %s\n') % (yes if pushable else no))
    finally:
        # always close the connection, even if a query above failed
        peer.close()
2775 2775
2776 2776
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is emulated through the ui.forcemerge override, mirroring
    # how the real merge machinery consumes it
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # with -v, report the inputs that can pre-empt merge-patterns
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # silence _picktool's own output unless --debug was given,
            # so the normal output stays one line per file
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2861 2861
2862 2862
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace,
            # escaping both sides so binary values stay printable.
            for name, value in sorted(peer.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(name), stringutil.escapestr(value))
                )
            return
        # Update mode: compare-and-swap the key from old to new on the remote.
        key, old, new = keyinfo
        args = {
            b'namespace': namespace,
            b'key': key,
            b'old': old,
            b'new': new,
        }
        with peer.commandexecutor() as executor:
            outcome = executor.callcommand(b'pushkey', args).result()

        ui.status(pycompat.bytestr(outcome) + b'\n')
        # Shell convention: 0 on success, non-zero on failure.
        return not outcome
    finally:
        peer.close()
2898 2898
2899 2899
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors (pvec) of two revisions: print each vector,
    # both depths, and the delta/hamming/overall distances plus the relation.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Relation symbols: '=' equal, '>'/'<' ordering, '|' divergent.
    # NOTE(review): assumes the four pvec comparison operators are exhaustive;
    # if none matched, 'rel' would be unbound below — confirm against pvec.py.
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2926 2926
2927 2927
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    target = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        # None means "rebuild everything"; --minimal narrows the set below.
        rebuildfiles = None
        if opts.get('minimal'):
            inmanifest = set(target.manifest().keys())
            indirstate = set(ds)
            # Files the manifest knows but the dirstate does not.
            manifestonly = inmanifest - indirstate
            # Dirstate-only files that are not pending adds.
            notadded = set()
            for path in indirstate - inmanifest:
                if not ds.get_entry(path).added:
                    notadded.add(path)
            rebuildfiles = manifestonly | notadded

        ds.rebuild(target.node(), target.manifest(), rebuildfiles)
2975 2975
2976 2976
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Normalize keyword arguments to bytes keys before reading options.
    byteopts = pycompat.byteskwargs(opts)
    only_data = byteopts.get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
2993 2993
2994 2994
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source filenode) or a false value.
        copysource = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not copysource:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            srcpath, srcnode = copysource
            ui.write(
                _(b"%s renamed from %s:%s\n") % (relpath, srcpath, hex(srcnode))
            )
3014 3014
3015 3015
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for deterministic output.
    for requirement in sorted(repo.requirements):
        ui.write(requirement + b"\n")
3021 3021
3022 3022
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: emit one raw row per revision and return immediately;
        # none of the summary statistics below are computed.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": treat the revision as its own base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the running head set: parents stop being heads once
            # the child revision is seen.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # Cumulative raw size vs. cumulative stored size so far.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # NOTE: 'format' shadows the builtin for the rest of this function.
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total] and the
    # total slot is overwritten with the average after the scan.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot (depth 0): starts a new delta chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta revision: extend the base's chain bookkeeping.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Plain delta: classify by which revision it is stored against.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the stored chunk identifies the compression engine.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # NOTE(review): assumes at least one revision; an empty revlog would
    # divide by zero here and in avgchainlen below — confirm callers.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Width-parameterized format templates; the %%%d placeholders become
    # field widths sized to the largest value being printed.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Decimal format sized to fit `max`.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # "count (percent%)" format sized to fit `max`.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # Pair a value with its percentage of `total` (100% when total is 0).
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render a chunk-type label: printable letters get both hex and ASCII.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3377 3377
3378 3378
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Only the two historical index layouts are supported.
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full 40-char hashes; otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Measure one rendered node id to size the header columns.
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                # Format 0 reports parents as node ids.
                pp = r.parents(node)
            except Exception:
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers and adds flags.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3492 3492
3493 3493
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Pipeline of (stage name, transform) applied to the parse tree in order.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final 'optimized' stage.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # showalways: stages always printed; showchanged: printed only when the
    # tree differs from the last one printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        # Thread the tree through each stage, remembering every intermediate.
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and optimized trees and diff the results.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Present the mismatch as a unified-diff-style listing of revisions.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3625 3625
3626 3626
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    fd_spec = opts[b'logiofd']
    path_spec = opts[b'logiofile']
    if fd_spec and path_spec:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if fd_spec:
        # Unbuffered binary mode: line buffering of binary streams isn't
        # supported (and warns on Python 3.8+), and this path isn't
        # performance critical, so no buffering is acceptable.
        fd = int(fd_spec)
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # A pipe can't seek, so append mode fails on py3; retry as 'wb'.
            logfh = os.fdopen(fd, 'wb', 0)
    elif path_spec:
        logfh = open(path_spec, b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3675 3675
3676 3676
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This can be useful for writing repository conversion tools,
    but should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # rev2 defaults to the null revision when omitted.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3704 3704
3705 3705
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir there is no FILE argument, so the first positional
    # argument is actually the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fix: report errors under this command's own name (the strings
            # were copy-pasted from debugdata and produced misleading
            # "hg debugdata" usage errors).
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Unwrap filelog-style objects down to the underlying revlog.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3732 3732
3733 3733
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Fix: the module-level ssl.wrap_socket() was deprecated in Python 3.7
    # and removed in 3.12. Build an equivalent unverified client context
    # instead — we only need the raw peer certificate bytes below, not a
    # validated connection, so hostname checking and verification stay off
    # exactly as before.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3805 3805
3806 3806
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every *.hg bundle under .hg/strip-backup, most recent first,
    # so --recover tries the newest backups before older ones.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # These keys are expected by the incoming/getremotechanges machinery
    # below; force known values since they are not user options here.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from chlist through `displayer`,
        # honoring the --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            # A changeset with two non-null parents is a merge.
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent revision the local repo does
            # not have; report it and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Quietly work out which changesets this bundle would add.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle that contains the wanted node,
                # then stop scanning further backups.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print the bundle's mtime as a header, then
                # either the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3947 3947
3948 3948
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    """dump the subrepository state recorded in the given revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3961 3961
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily: only needed when this debug command actually runs.
    import code

    code.interact(local={'ui': ui, 'repo': repo})
3977 3977
3978 3978
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless the closest
    successors sets are requested.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so repeated work is avoided.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # One indented line per successors set; empty sets ("pruned")
            # still produce a blank line.
            if succsset:
                ui.write(b' ')
                ui.write(b' '.join(short(n) for n in succsset))
            ui.write(b'\n')
4034 4034
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode is None:
            display = b'missing'
        elif not fnode:
            # Falsy but not None: the cache entry is reported as invalid.
            display = b'invalid'
        else:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4053 4053
4054 4054
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    # --rev requires a repository even though the command itself is
    # declared optionalrepo (generic templates work without one).
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D/--define KEY=VALUE pairs into template properties.
    # An empty key is rejected, as is the key 'ui' — presumably reserved
    # for the built-in ui template resource (TODO confirm).
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    # With --verbose, dump the parsed template tree, plus the
    # alias-expanded tree when [templatealias] rewrites changed it.
    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once, with only the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4118 4118
4119 4119
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # ui.getpass() may yield None; substitute a printable placeholder.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4134 4134
4135 4135
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4148 4148
4149 4149
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks while cache files are
    # (re)written.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4155 4155
4156 4156
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before delegating all of the
    # actual work to the upgrade module.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4206 4206
4207 4207
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    paths = list(repo[None].walk(matcher))
    if not paths:
        return
    # Normalize separators when ui.slash is set on platforms where the
    # native separator is not '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Column widths are sized to the longest absolute and relative paths.
    abswidth = max(len(p) for p in paths)
    relwidth = max(len(repo.pathto(p)) for p in paths)
    fmt = b'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for p in paths:
        exact = b'exact' if matcher.exact(p) else b''
        line = fmt % (p, display(repo.pathto(p)), exact)
        ui.write(b"%s\n" % line.rstrip())
4234 4234
4235 4235
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = [
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4253 4253
4254 4254
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    """exercise the debugwireargs wire protocol command against a peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # The generic remote options are not wire arguments; strip them.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Only forward options that were actually set.
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4285 4285
4286 4286
4287 4287 def _parsewirelangblocks(fh):
4288 4288 activeaction = None
4289 4289 blocklines = []
4290 4290 lastindent = 0
4291 4291
4292 4292 for line in fh:
4293 4293 line = line.rstrip()
4294 4294 if not line:
4295 4295 continue
4296 4296
4297 4297 if line.startswith(b'#'):
4298 4298 continue
4299 4299
4300 4300 if not line.startswith(b' '):
4301 4301 # New block. Flush previous one.
4302 4302 if activeaction:
4303 4303 yield activeaction, blocklines
4304 4304
4305 4305 activeaction = line
4306 4306 blocklines = []
4307 4307 lastindent = 0
4308 4308 continue
4309 4309
4310 4310 # Else we start with an indent.
4311 4311
4312 4312 if not activeaction:
4313 4313 raise error.Abort(_(b'indented line outside of block'))
4314 4314
4315 4315 indent = len(line) - len(line.lstrip())
4316 4316
4317 4317 # If this line is indented more than the last line, concatenate it.
4318 4318 if indent > lastindent and blocklines:
4319 4319 blocklines[-1] += line.lstrip()
4320 4320 else:
4321 4321 blocklines.append(line)
4322 4322 lastindent = indent
4323 4323
4324 4324 # Flush last block.
4325 4325 if activeaction:
4326 4326 yield activeaction, blocklines
4327 4327
4328 4328
4329 4329 @command(
4330 4330 b'debugwireproto',
4331 4331 [
4332 4332 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4333 4333 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4334 4334 (
4335 4335 b'',
4336 4336 b'noreadstderr',
4337 4337 False,
4338 4338 _(b'do not read from stderr of the remote'),
4339 4339 ),
4340 4340 (
4341 4341 b'',
4342 4342 b'nologhandshake',
4343 4343 False,
4344 4344 _(b'do not log I/O related to the peer handshake'),
4345 4345 ),
4346 4346 ]
4347 4347 + cmdutil.remoteopts,
4348 4348 _(b'[PATH]'),
4349 4349 optionalrepo=True,
4350 4350 )
4351 4351 def debugwireproto(ui, repo, path=None, **opts):
4352 4352 """send wire protocol commands to a server
4353 4353
4354 4354 This command can be used to issue wire protocol commands to remote
4355 4355 peers and to debug the raw data being exchanged.
4356 4356
4357 4357 ``--localssh`` will start an SSH server against the current repository
4358 4358 and connect to that. By default, the connection will perform a handshake
4359 4359 and establish an appropriate peer instance.
4360 4360
4361 4361 ``--peer`` can be used to bypass the handshake protocol and construct a
4362 4362 peer instance using the specified class type. Valid values are ``raw``,
4363 4363 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4364 4364 don't support higher-level command actions.
4365 4365
4366 4366 ``--noreadstderr`` can be used to disable automatic reading from stderr
4367 4367 of the peer (for SSH connections only). Disabling automatic reading of
4368 4368 stderr is useful for making output more deterministic.
4369 4369
4370 4370 Commands are issued via a mini language which is specified via stdin.
4371 4371 The language consists of individual actions to perform. An action is
4372 4372 defined by a block. A block is defined as a line with no leading
4373 4373 space followed by 0 or more lines with leading space. Blocks are
4374 4374 effectively a high-level command with additional metadata.
4375 4375
4376 4376 Lines beginning with ``#`` are ignored.
4377 4377
4378 4378 The following sections denote available actions.
4379 4379
4380 4380 raw
4381 4381 ---
4382 4382
4383 4383 Send raw data to the server.
4384 4384
4385 4385 The block payload contains the raw data to send as one atomic send
4386 4386 operation. The data may not actually be delivered in a single system
4387 4387 call: it depends on the abilities of the transport being used.
4388 4388
4389 4389 Each line in the block is de-indented and concatenated. Then, that
4390 4390 value is evaluated as a Python b'' literal. This allows the use of
4391 4391 backslash escaping, etc.
4392 4392
4393 4393 raw+
4394 4394 ----
4395 4395
4396 4396 Behaves like ``raw`` except flushes output afterwards.
4397 4397
4398 4398 command <X>
4399 4399 -----------
4400 4400
4401 4401 Send a request to run a named command, whose name follows the ``command``
4402 4402 string.
4403 4403
4404 4404 Arguments to the command are defined as lines in this block. The format of
4405 4405 each line is ``<key> <value>``. e.g.::
4406 4406
4407 4407 command listkeys
4408 4408 namespace bookmarks
4409 4409
4410 4410 If the value begins with ``eval:``, it will be interpreted as a Python
4411 4411 literal expression. Otherwise values are interpreted as Python b'' literals.
4412 4412 This allows sending complex types and encoding special byte sequences via
4413 4413 backslash escaping.
4414 4414
4415 4415 The following arguments have special meaning:
4416 4416
4417 4417 ``PUSHFILE``
4418 4418 When defined, the *push* mechanism of the peer will be used instead
4419 4419 of the static request-response mechanism and the content of the
4420 4420 file specified in the value of this argument will be sent as the
4421 4421 command payload.
4422 4422
4423 4423 This can be used to submit a local bundle file to the remote.
4424 4424
4425 4425 batchbegin
4426 4426 ----------
4427 4427
4428 4428 Instruct the peer to begin a batched send.
4429 4429
4430 4430 All ``command`` blocks are queued for execution until the next
4431 4431 ``batchsubmit`` block.
4432 4432
4433 4433 batchsubmit
4434 4434 -----------
4435 4435
4436 4436 Submit previously queued ``command`` blocks as a batch request.
4437 4437
4438 4438 This action MUST be paired with a ``batchbegin`` action.
4439 4439
4440 4440 httprequest <method> <path>
4441 4441 ---------------------------
4442 4442
4443 4443 (HTTP peer only)
4444 4444
4445 4445 Send an HTTP request to the peer.
4446 4446
4447 4447 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4448 4448
4449 4449 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4450 4450 headers to add to the request. e.g. ``Accept: foo``.
4451 4451
4452 4452 The following arguments are special:
4453 4453
4454 4454 ``BODYFILE``
4455 4455 The content of the file defined as the value to this argument will be
4456 4456 transferred verbatim as the HTTP request body.
4457 4457
4458 4458 ``frame <type> <flags> <payload>``
4459 4459 Send a unified protocol frame as part of the request body.
4460 4460
4461 4461 All frames will be collected and sent as the body to the HTTP
4462 4462 request.
4463 4463
4464 4464 close
4465 4465 -----
4466 4466
4467 4467 Close the connection to the server.
4468 4468
4469 4469 flush
4470 4470 -----
4471 4471
4472 4472 Flush data written to the server.
4473 4473
4474 4474 readavailable
4475 4475 -------------
4476 4476
4477 4477 Close the write end of the connection and read all available data from
4478 4478 the server.
4479 4479
4480 4480 If the connection to the server encompasses multiple pipes, we poll both
4481 4481 pipes and read available data.
4482 4482
4483 4483 readline
4484 4484 --------
4485 4485
4486 4486 Read a line of output from the server. If there are multiple output
4487 4487 pipes, reads only the main pipe.
4488 4488
4489 4489 ereadline
4490 4490 ---------
4491 4491
4492 4492 Like ``readline``, but read from the stderr pipe, if available.
4493 4493
4494 4494 read <X>
4495 4495 --------
4496 4496
4497 4497 ``read()`` N bytes from the server's main output pipe.
4498 4498
4499 4499 eread <X>
4500 4500 ---------
4501 4501
4502 4502 ``read()`` N bytes from the server's stderr pipe, if available.
4503 4503
4504 4504 Specifying Unified Frame-Based Protocol Frames
4505 4505 ----------------------------------------------
4506 4506
4507 4507 It is possible to emit a *Unified Frame-Based Protocol* by using special
4508 4508 syntax.
4509 4509
4510 4510 A frame is composed as a type, flags, and payload. These can be parsed
4511 4511 from a string of the form:
4512 4512
4513 4513 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4514 4514
4515 4515 ``request-id`` and ``stream-id`` are integers defining the request and
4516 4516 stream identifiers.
4517 4517
4518 4518 ``type`` can be an integer value for the frame type or the string name
4519 4519 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4520 4520 ``command-name``.
4521 4521
4522 4522 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4523 4523 components. Each component (and there can be just one) can be an integer
4524 4524 or a flag name for stream flags or frame flags, respectively. Values are
4525 4525 resolved to integers and then bitwise OR'd together.
4526 4526
4527 4527 ``payload`` represents the raw frame payload. If it begins with
4528 4528 ``cbor:``, the following string is evaluated as Python code and the
4529 4529 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4530 4530 as a Python byte string literal.
4531 4531 """
4532 4532 opts = pycompat.byteskwargs(opts)
4533 4533
4534 4534 if opts[b'localssh'] and not repo:
4535 4535 raise error.Abort(_(b'--localssh requires a repository'))
4536 4536
4537 4537 if opts[b'peer'] and opts[b'peer'] not in (
4538 4538 b'raw',
4539 4539 b'ssh1',
4540 4540 ):
4541 4541 raise error.Abort(
4542 4542 _(b'invalid value for --peer'),
4543 4543 hint=_(b'valid values are "raw" and "ssh1"'),
4544 4544 )
4545 4545
4546 4546 if path and opts[b'localssh']:
4547 4547 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4548 4548
4549 4549 if ui.interactive():
4550 4550 ui.write(_(b'(waiting for commands on stdin)\n'))
4551 4551
4552 4552 blocks = list(_parsewirelangblocks(ui.fin))
4553 4553
4554 4554 proc = None
4555 4555 stdin = None
4556 4556 stdout = None
4557 4557 stderr = None
4558 4558 opener = None
4559 4559
4560 4560 if opts[b'localssh']:
4561 4561 # We start the SSH server in its own process so there is process
4562 4562 # separation. This prevents a whole class of potential bugs around
4563 4563 # shared state from interfering with server operation.
4564 4564 args = procutil.hgcmd() + [
4565 4565 b'-R',
4566 4566 repo.root,
4567 4567 b'debugserve',
4568 4568 b'--sshstdio',
4569 4569 ]
4570 4570 proc = subprocess.Popen(
4571 4571 pycompat.rapply(procutil.tonativestr, args),
4572 4572 stdin=subprocess.PIPE,
4573 4573 stdout=subprocess.PIPE,
4574 4574 stderr=subprocess.PIPE,
4575 4575 bufsize=0,
4576 4576 )
4577 4577
4578 4578 stdin = proc.stdin
4579 4579 stdout = proc.stdout
4580 4580 stderr = proc.stderr
4581 4581
4582 4582 # We turn the pipes into observers so we can log I/O.
4583 4583 if ui.verbose or opts[b'peer'] == b'raw':
4584 4584 stdin = util.makeloggingfileobject(
4585 4585 ui, proc.stdin, b'i', logdata=True
4586 4586 )
4587 4587 stdout = util.makeloggingfileobject(
4588 4588 ui, proc.stdout, b'o', logdata=True
4589 4589 )
4590 4590 stderr = util.makeloggingfileobject(
4591 4591 ui, proc.stderr, b'e', logdata=True
4592 4592 )
4593 4593
4594 4594 # --localssh also implies the peer connection settings.
4595 4595
4596 4596 url = b'ssh://localserver'
4597 4597 autoreadstderr = not opts[b'noreadstderr']
4598 4598
4599 4599 if opts[b'peer'] == b'ssh1':
4600 4600 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4601 4601 peer = sshpeer.sshv1peer(
4602 4602 ui,
4603 4603 url,
4604 4604 proc,
4605 4605 stdin,
4606 4606 stdout,
4607 4607 stderr,
4608 4608 None,
4609 4609 autoreadstderr=autoreadstderr,
4610 4610 )
4611 4611 elif opts[b'peer'] == b'raw':
4612 4612 ui.write(_(b'using raw connection to peer\n'))
4613 4613 peer = None
4614 4614 else:
            # Handshake succeeded: wrap the live subprocess pipes in a real
            # ssh peer object so the command/batch actions below can use it.
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = urlutil.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        # Keys are native str (not bytes) because they become **kwargs for
        # the urllib-based opener machinery.
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        # --debug additionally logs the high-level data API calls.
        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'raw':
            # Raw mode: no peer object; only ``httprequest`` actions work.
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        # Neither --localssh nor a path argument was given.
        raise error.Abort(_(b'unsupported connection configuration'))
4676 4676
4677 4677 batchedcommands = None
4678 4678
4679 4679 # Now perform actions based on the parsed wire language instructions.
4680 4680 for action, lines in blocks:
4681 4681 if action in (b'raw', b'raw+'):
4682 4682 if not stdin:
4683 4683 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4684 4684
4685 4685 # Concatenate the data together.
4686 4686 data = b''.join(l.lstrip() for l in lines)
4687 4687 data = stringutil.unescapestr(data)
4688 4688 stdin.write(data)
4689 4689
4690 4690 if action == b'raw+':
4691 4691 stdin.flush()
4692 4692 elif action == b'flush':
4693 4693 if not stdin:
4694 4694 raise error.Abort(_(b'cannot call flush on this peer'))
4695 4695 stdin.flush()
4696 4696 elif action.startswith(b'command'):
4697 4697 if not peer:
4698 4698 raise error.Abort(
4699 4699 _(
4700 4700 b'cannot send commands unless peer instance '
4701 4701 b'is available'
4702 4702 )
4703 4703 )
4704 4704
4705 4705 command = action.split(b' ', 1)[1]
4706 4706
4707 4707 args = {}
4708 4708 for line in lines:
4709 4709 # We need to allow empty values.
4710 4710 fields = line.lstrip().split(b' ', 1)
4711 4711 if len(fields) == 1:
4712 4712 key = fields[0]
4713 4713 value = b''
4714 4714 else:
4715 4715 key, value = fields
4716 4716
4717 4717 if value.startswith(b'eval:'):
4718 4718 value = stringutil.evalpythonliteral(value[5:])
4719 4719 else:
4720 4720 value = stringutil.unescapestr(value)
4721 4721
4722 4722 args[key] = value
4723 4723
4724 4724 if batchedcommands is not None:
4725 4725 batchedcommands.append((command, args))
4726 4726 continue
4727 4727
4728 4728 ui.status(_(b'sending %s command\n') % command)
4729 4729
4730 4730 if b'PUSHFILE' in args:
4731 4731 with open(args[b'PUSHFILE'], 'rb') as fh:
4732 4732 del args[b'PUSHFILE']
4733 4733 res, output = peer._callpush(
4734 4734 command, fh, **pycompat.strkwargs(args)
4735 4735 )
4736 4736 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4737 4737 ui.status(
4738 4738 _(b'remote output: %s\n') % stringutil.escapestr(output)
4739 4739 )
4740 4740 else:
4741 4741 with peer.commandexecutor() as e:
4742 4742 res = e.callcommand(command, args).result()
4743 4743
4744 4744 ui.status(
4745 4745 _(b'response: %s\n')
4746 4746 % stringutil.pprint(res, bprefix=True, indent=2)
4747 4747 )
4748 4748
4749 4749 elif action == b'batchbegin':
4750 4750 if batchedcommands is not None:
4751 4751 raise error.Abort(_(b'nested batchbegin not allowed'))
4752 4752
4753 4753 batchedcommands = []
4754 4754 elif action == b'batchsubmit':
4755 4755 # There is a batching API we could go through. But it would be
4756 4756 # difficult to normalize requests into function calls. It is easier
4757 4757 # to bypass this layer and normalize to commands + args.
4758 4758 ui.status(
4759 4759 _(b'sending batch with %d sub-commands\n')
4760 4760 % len(batchedcommands)
4761 4761 )
4762 4762 assert peer is not None
4763 4763 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4764 4764 ui.status(
4765 4765 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4766 4766 )
4767 4767
4768 4768 batchedcommands = None
4769 4769
4770 4770 elif action.startswith(b'httprequest '):
4771 4771 if not opener:
4772 4772 raise error.Abort(
4773 4773 _(b'cannot use httprequest without an HTTP peer')
4774 4774 )
4775 4775
4776 4776 request = action.split(b' ', 2)
4777 4777 if len(request) != 3:
4778 4778 raise error.Abort(
4779 4779 _(
4780 4780 b'invalid httprequest: expected format is '
4781 4781 b'"httprequest <method> <path>'
4782 4782 )
4783 4783 )
4784 4784
4785 4785 method, httppath = request[1:]
4786 4786 headers = {}
4787 4787 body = None
4788 4788 frames = []
4789 4789 for line in lines:
4790 4790 line = line.lstrip()
4791 4791 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4792 4792 if m:
4793 4793 # Headers need to use native strings.
4794 4794 key = pycompat.strurl(m.group(1))
4795 4795 value = pycompat.strurl(m.group(2))
4796 4796 headers[key] = value
4797 4797 continue
4798 4798
4799 4799 if line.startswith(b'BODYFILE '):
4800 4800 with open(line.split(b' ', 1), b'rb') as fh:
4801 4801 body = fh.read()
4802 4802 elif line.startswith(b'frame '):
4803 4803 frame = wireprotoframing.makeframefromhumanstring(
4804 4804 line[len(b'frame ') :]
4805 4805 )
4806 4806
4807 4807 frames.append(frame)
4808 4808 else:
4809 4809 raise error.Abort(
4810 4810 _(b'unknown argument to httprequest: %s') % line
4811 4811 )
4812 4812
4813 4813 url = path + httppath
4814 4814
4815 4815 if frames:
4816 4816 body = b''.join(bytes(f) for f in frames)
4817 4817
4818 4818 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4819 4819
4820 4820 # urllib.Request insists on using has_data() as a proxy for
4821 4821 # determining the request method. Override that to use our
4822 4822 # explicitly requested method.
4823 4823 req.get_method = lambda: pycompat.sysstr(method)
4824 4824
4825 4825 try:
4826 4826 res = opener.open(req)
4827 4827 body = res.read()
4828 4828 except util.urlerr.urlerror as e:
4829 4829 # read() method must be called, but only exists in Python 2
4830 4830 getattr(e, 'read', lambda: None)()
4831 4831 continue
4832 4832
4833 4833 ct = res.headers.get('Content-Type')
4834 4834 if ct == 'application/mercurial-cbor':
4835 4835 ui.write(
4836 4836 _(b'cbor> %s\n')
4837 4837 % stringutil.pprint(
4838 4838 cborutil.decodeall(body), bprefix=True, indent=2
4839 4839 )
4840 4840 )
4841 4841
4842 4842 elif action == b'close':
4843 4843 assert peer is not None
4844 4844 peer.close()
4845 4845 elif action == b'readavailable':
4846 4846 if not stdout or not stderr:
4847 4847 raise error.Abort(
4848 4848 _(b'readavailable not available on this peer')
4849 4849 )
4850 4850
4851 4851 stdin.close()
4852 4852 stdout.read()
4853 4853 stderr.read()
4854 4854
4855 4855 elif action == b'readline':
4856 4856 if not stdout:
4857 4857 raise error.Abort(_(b'readline not available on this peer'))
4858 4858 stdout.readline()
4859 4859 elif action == b'ereadline':
4860 4860 if not stderr:
4861 4861 raise error.Abort(_(b'ereadline not available on this peer'))
4862 4862 stderr.readline()
4863 4863 elif action.startswith(b'read '):
4864 4864 count = int(action.split(b' ', 1)[1])
4865 4865 if not stdout:
4866 4866 raise error.Abort(_(b'read not available on this peer'))
4867 4867 stdout.read(count)
4868 4868 elif action.startswith(b'eread '):
4869 4869 count = int(action.split(b' ', 1)[1])
4870 4870 if not stderr:
4871 4871 raise error.Abort(_(b'eread not available on this peer'))
4872 4872 stderr.read(count)
4873 4873 else:
4874 4874 raise error.Abort(_(b'unknown action: %s') % action)
4875 4875
4876 4876 if batchedcommands is not None:
4877 4877 raise error.Abort(_(b'unclosed "batchbegin" request'))
4878 4878
4879 4879 if peer:
4880 4880 peer.close()
4881 4881
4882 4882 if proc:
4883 4883 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now