##// END OF EJS Templates
debuglock: ignore ENOENT error when unlocking...
marmoute -
r49924:020378f3 stable
parent child Browse files
Show More
@@ -1,4884 +1,4884 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import binascii
11 11 import codecs
12 12 import collections
13 13 import contextlib
14 14 import difflib
15 15 import errno
16 16 import glob
17 17 import operator
18 18 import os
19 19 import platform
20 20 import random
21 21 import re
22 22 import socket
23 23 import ssl
24 24 import stat
25 25 import string
26 26 import subprocess
27 27 import sys
28 28 import time
29 29
30 30 from .i18n import _
31 31 from .node import (
32 32 bin,
33 33 hex,
34 34 nullrev,
35 35 short,
36 36 )
37 37 from .pycompat import (
38 38 getattr,
39 39 open,
40 40 )
41 41 from . import (
42 42 bundle2,
43 43 bundlerepo,
44 44 changegroup,
45 45 cmdutil,
46 46 color,
47 47 context,
48 48 copies,
49 49 dagparser,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 vfs as vfsmod,
92 92 wireprotoframing,
93 93 wireprotoserver,
94 94 )
95 95 from .interfaces import repository
96 96 from .utils import (
97 97 cborutil,
98 98 compression,
99 99 dateutil,
100 100 procutil,
101 101 stringutil,
102 102 urlutil,
103 103 )
104 104
105 105 from .revlogutils import (
106 106 deltas as deltautil,
107 107 nodemap,
108 108 rewrite,
109 109 sidedata,
110 110 )
111 111
# Convenience alias: release a sequence of locks in one call.
release = lockmod.release

# Registration table for all debug* commands.  It is seeded with the
# commands registered by the strip module so those are exposed alongside
# the commands defined in this file.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
117 117
118 118
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit revlog index file was given: open it directly,
        # bypassing any repository.
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repo.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rl.rev(anc), hex(anc)))
138 138
139 139
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes everywhere else in this file; use a bytes
    # literal here as well so the cachevfs path join does not mix
    # str and bytes.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
155 155
156 156
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle path (supports local files and URLs) and apply it.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
163 163
164 164
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # The whole build happens under both locks and a single transaction so
    # a failure part-way through leaves no partial state behind.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the last node committed, -1 before the first one
        atbranch = b'default'
        nodeids = []  # commit node ids, indexed by DAG id, for backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' events carry (id, parent-ids) for a new node.
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the file content of
                        # both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's own line so later merges conflict
                    # only where intended.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # Carry over the second parent's nf* files so they
                        # are not dropped by the merge commit.
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: provide content for files we created.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve DAG backrefs to actual commit node ids.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' events attach a local tag to an already-built node.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' events switch the named branch for subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
350 350
351 351
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of changegroup ``gen``.

    With ``all`` set, every delta of every chunk group (changelog,
    manifest, filelogs) is listed; otherwise only the changelog node
    hashes are printed.  ``indent`` prefixes each line with that many
    spaces (used when nested inside bundle2 output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # NOTE: gen.deltaiter() consumes the current chunk group of the
            # stream, so each call advances to the next group.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelogheader() returns {} at the end of the stream, which stops
        # the two-argument iter().
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
391 391
392 392
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Report unknown encoding versions instead of crashing; this is a
        # debug command, so showing what we can is more useful.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
415 415
416 416
def _debugphaseheads(ui, data, indent=0):
    """print the phase heads encoded in 'data', one '<node> <phase>' line each"""
    pad = b' ' * indent
    decoded = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for node in decoded[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(node), phasename))
425 425
426 426
def _quasirepr(thing):
    """Return a bytes repr of *thing*, with mapping keys in sorted order."""
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    # Sort the keys so the output is stable regardless of insertion order.
    pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(pairs)
433 433
434 434
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only show parts whose type was requested.
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # For known part types, delegate to the dedicated pretty-printer,
        # indented under the part header (suppressed in quiet mode).
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
457 457
458 458
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec only identifies the bundle format; no content listing.
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        # bundle2 and plain changegroup bundles have different printers.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
481 481
482 482
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # Query first, then print, so a failing peer produces no header.
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        peer.close()
502 502
503 503
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # Recompute the file-change information from the changeset itself.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the precomputed information from the changelog sidedata;
        # stays None (and prints nothing) if no sidedata block is stored.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classify each touched file; the first matching category wins.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
553 553
554 554
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    errcount = 0
    for err in repo.dirstate.verify(manifest1, manifest2):
        # each entry is a tuple of (format string, *args)
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
568 568
569 569
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
582 582
583 583
def _debugdisplaycolor(ui):
    """Print every available color, each rendered in its own color."""
    # Work on a copy so we can replace the style table without touching
    # the caller's ui.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # In terminfo mode, custom colors come from the [color] config
        # section; strip the 'color.'/'terminfo.' prefixes for display.
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
600 600
601 601
def _debugdisplaystyle(ui):
    """Print each configured style label followed by its rendered effects."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad to the widest label so the effect column lines up.
    colwidth = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, colwidth - len(name))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
615 615
616 616
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy revlogs wholesale, so secret changesets
        # cannot be filtered out; warn instead of silently leaking them.
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
638 638
639 639
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Stand-alone revlog index file: emit its DAG directly.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) for every revision, and an
            # ('l', ...) label event for explicitly requested revs.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged revision to its list of tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an ('a', branch) annotation whenever the named
                    # branch changes along the changelog.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
709 709
710 710
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir the single positional argument is the revision,
        # not a file name; shift it over.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
726 726
727 727
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    # NOTE(review): both norepo and optionalrepo are set here, which looks
    # contradictory -- confirm which one the registrar honors.
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        d = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    # d is an (unixtime, tz-offset) pair.
    ui.writenoi18n(b"internal: %d %d\n" % d)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % m(d[0]))
746 746
747 747
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
      (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
      of this revision
    :``extradist``: total size of revisions not part of this delta chain from
      base of delta chain to end of this revision; a measurement
      of how much extra data we need to read/seek across to read
      the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
      how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
      (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Summarize one revision's delta: sizes, what the delta is based
        # on, and the full chain back to its base.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; e[5]/e[6] are the parents.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, deltas are always against the
            # previous revision unless this rev is a full snapshot.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Number each distinct chain base as it is first seen.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # On-disk span from the chain base to the end of this revision,
        # versus how much of that span actually belongs to the chain.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # Guard the ratios against zero-size revisions/chains.
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain to measure how much
            # disk data would actually be touched.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
928 928
929 929
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts['dates']
    # --nodates is deprecated but still honored.
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) when --datesort is given
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # -1 marks an entry whose mtime was never recorded
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink bit set: show 'lnk' instead of an octal mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
984 984
985 985
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20 # 160 bits for SHA-1
        # The ignore-patterns hash is the trailing bytes of the docket's
        # tree metadata.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1000 1000
1001 1001
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented configs are relevant for people playing
    with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size
      is adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)))

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual (possibly remote) peer

        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # use the local repository itself as the "remote", restricted to the
        # ancestors of --remote-as-revs through a dedicated repoview filter
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # similarly, restrict the local side to the ancestors of
        # --local-as-revs
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # the discovery implementations record audit information (e.g.
    # total-roundtrips) into this dict via the `audit` argument
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: keep discovery chatter out of the stream
        # and report it as a formatter field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # every revision is either common or missing, never both
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"      both:            %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1255 1255
1256 1256
# size (4 KiB) of the read/write chunks used when copying download data
_chunksize = 4 << 10
1258 1258
1259 1259
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    fh = urlmod.open(ui, url, output)

    # write to stdout unless an output file was requested
    sink = ui
    if output:
        sink = open(output, b"wb", _chunksize)
    try:
        # stream the resource in fixed-size chunks until EOF
        while True:
            chunk = fh.read(_chunksize)
            if not chunk:
                break
            sink.write(chunk)
    finally:
        if output:
            sink.close()
1282 1282
1283 1283
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; in an oxidized (PyOxidizer) build
        # modules have no __file__ and live inside the executable
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with the extension's compatibility status
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
            fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1345 1345
1346 1346
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Parses EXPR through the fileset pipeline (parse -> analyze -> optimize),
    optionally printing the tree at each requested stage, then lists the
    candidate file names matched by the resulting matcher.
    '''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # the successive transformation stages a fileset expression goes through
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, dumping the tree after each requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1442 1442
1443 1443
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # building a report is exclusive with consuming one or with --dry-run
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # detection and rewriting are delegated to the `rewrite` helper module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1516 1516
1517 1517
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: longest variant name, at least the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # format string that pads the variant name so value columns align
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes-like values print verbatim, everything else as yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # table header
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # choose labels so mismatches between repo/config/default values can
        # be highlighted (e.g. by the color extension)
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1588 1588
1589 1589
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def _yesno(flag):
        # render a boolean probe result as b'yes'/b'no'
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % _yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % _yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % _yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probe with a throwaway file in the target directory; leave the
        # result as "(unknown)" when the probe file cannot be created
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = _yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1612 1612
1613 1613
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # build the wire-protocol getbundle() arguments from the command line
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing compression name to the on-disk bundle header
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1660 1660
1661 1661
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # test the file itself, then each parent directory: ignoring
                # a directory also ignores everything inside it
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore-file rule was responsible
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1710 1710
1711 1711
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes with --debug, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # probe the first revision (if any) for the rendered node-id width
    idlen = 12
    for probe in store:
        idlen = len(shortfn(store.node(probe)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    # one table row per revision: rev, linkrev, node id, both parents
    for rev in store:
        node = store.node(rev)
        ps = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(ps[0]))
        fm.write(b'p2', b'%s', shortfn(ps[1]))
        fm.plain(b'\n')

    fm.end()
1751 1751
1752 1752
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    # emit one parent->child edge per revision, skipping the null p2
    for rev in r:
        parents = r.parents(r.node(rev))
        ui.write(b"\t%d -> %d\n" % (r.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(parents[1]), rev))
    ui.write(b"}\n")
1771 1771
1772 1772
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise the index once so its internal counters are populated
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1782 1782
1783 1783
1784 1784 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1785 1785 def debuginstall(ui, **opts):
1786 1786 """test Mercurial installation
1787 1787
1788 1788 Returns 0 on success.
1789 1789 """
1790 1790 opts = pycompat.byteskwargs(opts)
1791 1791
1792 1792 problems = 0
1793 1793
1794 1794 fm = ui.formatter(b'debuginstall', opts)
1795 1795 fm.startitem()
1796 1796
1797 1797 # encoding might be unknown or wrong. don't translate these messages.
1798 1798 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1799 1799 err = None
1800 1800 try:
1801 1801 codecs.lookup(pycompat.sysstr(encoding.encoding))
1802 1802 except LookupError as inst:
1803 1803 err = stringutil.forcebytestr(inst)
1804 1804 problems += 1
1805 1805 fm.condwrite(
1806 1806 err,
1807 1807 b'encodingerror',
1808 1808 b" %s\n (check that your locale is properly set)\n",
1809 1809 err,
1810 1810 )
1811 1811
1812 1812 # Python
1813 1813 pythonlib = None
1814 1814 if util.safehasattr(os, '__file__'):
1815 1815 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1816 1816 elif getattr(sys, 'oxidized', False):
1817 1817 pythonlib = pycompat.sysexecutable
1818 1818
1819 1819 fm.write(
1820 1820 b'pythonexe',
1821 1821 _(b"checking Python executable (%s)\n"),
1822 1822 pycompat.sysexecutable or _(b"unknown"),
1823 1823 )
1824 1824 fm.write(
1825 1825 b'pythonimplementation',
1826 1826 _(b"checking Python implementation (%s)\n"),
1827 1827 pycompat.sysbytes(platform.python_implementation()),
1828 1828 )
1829 1829 fm.write(
1830 1830 b'pythonver',
1831 1831 _(b"checking Python version (%s)\n"),
1832 1832 (b"%d.%d.%d" % sys.version_info[:3]),
1833 1833 )
1834 1834 fm.write(
1835 1835 b'pythonlib',
1836 1836 _(b"checking Python lib (%s)...\n"),
1837 1837 pythonlib or _(b"unknown"),
1838 1838 )
1839 1839
1840 1840 try:
1841 1841 from . import rustext # pytype: disable=import-error
1842 1842
1843 1843 rustext.__doc__ # trigger lazy import
1844 1844 except ImportError:
1845 1845 rustext = None
1846 1846
1847 1847 security = set(sslutil.supportedprotocols)
1848 1848 if sslutil.hassni:
1849 1849 security.add(b'sni')
1850 1850
1851 1851 fm.write(
1852 1852 b'pythonsecurity',
1853 1853 _(b"checking Python security support (%s)\n"),
1854 1854 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1855 1855 )
1856 1856
1857 1857 # These are warnings, not errors. So don't increment problem count. This
1858 1858 # may change in the future.
1859 1859 if b'tls1.2' not in security:
1860 1860 fm.plain(
1861 1861 _(
1862 1862 b' TLS 1.2 not supported by Python install; '
1863 1863 b'network connections lack modern security\n'
1864 1864 )
1865 1865 )
1866 1866 if b'sni' not in security:
1867 1867 fm.plain(
1868 1868 _(
1869 1869 b' SNI not supported by Python install; may have '
1870 1870 b'connectivity issues with some servers\n'
1871 1871 )
1872 1872 )
1873 1873
1874 1874 fm.plain(
1875 1875 _(
1876 1876 b"checking Rust extensions (%s)\n"
1877 1877 % (b'missing' if rustext is None else b'installed')
1878 1878 ),
1879 1879 )
1880 1880
1881 1881 # TODO print CA cert info
1882 1882
1883 1883 # hg version
1884 1884 hgver = util.version()
1885 1885 fm.write(
1886 1886 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1887 1887 )
1888 1888 fm.write(
1889 1889 b'hgverextra',
1890 1890 _(b"checking Mercurial custom build (%s)\n"),
1891 1891 b'+'.join(hgver.split(b'+')[1:]),
1892 1892 )
1893 1893
1894 1894 # compiled modules
1895 1895 hgmodules = None
1896 1896 if util.safehasattr(sys.modules[__name__], '__file__'):
1897 1897 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1898 1898 elif getattr(sys, 'oxidized', False):
1899 1899 hgmodules = pycompat.sysexecutable
1900 1900
1901 1901 fm.write(
1902 1902 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1903 1903 )
1904 1904 fm.write(
1905 1905 b'hgmodules',
1906 1906 _(b"checking installed modules (%s)...\n"),
1907 1907 hgmodules or _(b"unknown"),
1908 1908 )
1909 1909
1910 1910 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1911 1911 rustext = rustandc # for now, that's the only case
1912 1912 cext = policy.policy in (b'c', b'allow') or rustandc
1913 1913 nopure = cext or rustext
1914 1914 if nopure:
1915 1915 err = None
1916 1916 try:
1917 1917 if cext:
1918 1918 from .cext import ( # pytype: disable=import-error
1919 1919 base85,
1920 1920 bdiff,
1921 1921 mpatch,
1922 1922 osutil,
1923 1923 )
1924 1924
1925 1925 # quiet pyflakes
1926 1926 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1927 1927 if rustext:
1928 1928 from .rustext import ( # pytype: disable=import-error
1929 1929 ancestor,
1930 1930 dirstate,
1931 1931 )
1932 1932
1933 1933 dir(ancestor), dir(dirstate) # quiet pyflakes
1934 1934 except Exception as inst:
1935 1935 err = stringutil.forcebytestr(inst)
1936 1936 problems += 1
1937 1937 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1938 1938
1939 1939 compengines = util.compengines._engines.values()
1940 1940 fm.write(
1941 1941 b'compengines',
1942 1942 _(b'checking registered compression engines (%s)\n'),
1943 1943 fm.formatlist(
1944 1944 sorted(e.name() for e in compengines),
1945 1945 name=b'compengine',
1946 1946 fmt=b'%s',
1947 1947 sep=b', ',
1948 1948 ),
1949 1949 )
1950 1950 fm.write(
1951 1951 b'compenginesavail',
1952 1952 _(b'checking available compression engines (%s)\n'),
1953 1953 fm.formatlist(
1954 1954 sorted(e.name() for e in compengines if e.available()),
1955 1955 name=b'compengine',
1956 1956 fmt=b'%s',
1957 1957 sep=b', ',
1958 1958 ),
1959 1959 )
1960 1960 wirecompengines = compression.compengines.supportedwireengines(
1961 1961 compression.SERVERROLE
1962 1962 )
1963 1963 fm.write(
1964 1964 b'compenginesserver',
1965 1965 _(
1966 1966 b'checking available compression engines '
1967 1967 b'for wire protocol (%s)\n'
1968 1968 ),
1969 1969 fm.formatlist(
1970 1970 [e.name() for e in wirecompengines if e.wireprotosupport()],
1971 1971 name=b'compengine',
1972 1972 fmt=b'%s',
1973 1973 sep=b', ',
1974 1974 ),
1975 1975 )
1976 1976 re2 = b'missing'
1977 1977 if util._re2:
1978 1978 re2 = b'available'
1979 1979 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1980 1980 fm.data(re2=bool(util._re2))
1981 1981
1982 1982 # templates
1983 1983 p = templater.templatedir()
1984 1984 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1985 1985 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1986 1986 if p:
1987 1987 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1988 1988 if m:
1989 1989 # template found, check if it is working
1990 1990 err = None
1991 1991 try:
1992 1992 templater.templater.frommapfile(m)
1993 1993 except Exception as inst:
1994 1994 err = stringutil.forcebytestr(inst)
1995 1995 p = None
1996 1996 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1997 1997 else:
1998 1998 p = None
1999 1999 fm.condwrite(
2000 2000 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2001 2001 )
2002 2002 fm.condwrite(
2003 2003 not m,
2004 2004 b'defaulttemplatenotfound',
2005 2005 _(b" template '%s' not found\n"),
2006 2006 b"default",
2007 2007 )
2008 2008 if not p:
2009 2009 problems += 1
2010 2010 fm.condwrite(
2011 2011 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2012 2012 )
2013 2013
2014 2014 # editor
2015 2015 editor = ui.geteditor()
2016 2016 editor = util.expandpath(editor)
2017 2017 editorbin = procutil.shellsplit(editor)[0]
2018 2018 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2019 2019 cmdpath = procutil.findexe(editorbin)
2020 2020 fm.condwrite(
2021 2021 not cmdpath and editor == b'vi',
2022 2022 b'vinotfound',
2023 2023 _(
2024 2024 b" No commit editor set and can't find %s in PATH\n"
2025 2025 b" (specify a commit editor in your configuration"
2026 2026 b" file)\n"
2027 2027 ),
2028 2028 not cmdpath and editor == b'vi' and editorbin,
2029 2029 )
2030 2030 fm.condwrite(
2031 2031 not cmdpath and editor != b'vi',
2032 2032 b'editornotfound',
2033 2033 _(
2034 2034 b" Can't find editor '%s' in PATH\n"
2035 2035 b" (specify a commit editor in your configuration"
2036 2036 b" file)\n"
2037 2037 ),
2038 2038 not cmdpath and editorbin,
2039 2039 )
2040 2040 if not cmdpath and editor != b'vi':
2041 2041 problems += 1
2042 2042
2043 2043 # check username
2044 2044 username = None
2045 2045 err = None
2046 2046 try:
2047 2047 username = ui.username()
2048 2048 except error.Abort as e:
2049 2049 err = e.message
2050 2050 problems += 1
2051 2051
2052 2052 fm.condwrite(
2053 2053 username, b'username', _(b"checking username (%s)\n"), username
2054 2054 )
2055 2055 fm.condwrite(
2056 2056 err,
2057 2057 b'usernameerror',
2058 2058 _(
2059 2059 b"checking username...\n %s\n"
2060 2060 b" (specify a username in your configuration file)\n"
2061 2061 ),
2062 2062 err,
2063 2063 )
2064 2064
2065 2065 for name, mod in extensions.extensions():
2066 2066 handler = getattr(mod, 'debuginstall', None)
2067 2067 if handler is not None:
2068 2068 problems += handler(ui, fm)
2069 2069
2070 2070 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2071 2071 if not problems:
2072 2072 fm.data(problems=problems)
2073 2073 fm.condwrite(
2074 2074 problems,
2075 2075 b'problems',
2076 2076 _(b"%d problems detected, please check your install!\n"),
2077 2077 problems,
2078 2078 )
2079 2079 fm.end()
2080 2080
2081 2081 return problems
2082 2082
2083 2083
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        if not repo.capable(b'known'):
            raise error.Abort(b"known() not supported by target repository")
        flags = repo.known([bin(s) for s in ids])
        ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
    finally:
        # hg.peer() may hold an open connection (e.g. ssh); release it the
        # same way debugpeer does instead of leaking it.
        repo.close()
2098 2098
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only so legacy completion scripts keep working; it simply
    # forwards to debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2103 2103
2104 2104
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get('force_free_lock'):
        # tryunlink ignores ENOENT, so force-freeing an already-free lock
        # is a harmless no-op instead of a traceback.
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the lock(s) until the user answers the prompt (or an
            # interrupt arrives); the finally clause then releases them.
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # a racing removal of the lock file just means it is free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2216 2216
2217 2217
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache of the root manifest storage; not every
        # revlog implementation provides one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # wlock guards the persistent cache file under .hg
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # no action requested: display the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2291 2291
2292 2292
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # Report which on-disk record format (v1/v2) will be used, by
        # reading both and comparing them.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable template; --template overrides it
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two merged commits (local/other), with optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # one entry per file tracked by the merge state
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            # record layout depends on the record type
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras for files that are not in the merge state proper
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2400 2400
2401 2401
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect candidates from every namespace except 'branches'; branches
    # get special treatment below because only open ones are listed.
    candidates = set()
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for branch, heads, tip, isclosed in repo.branchmap().iterbranches():
        if not isclosed:
            candidates.add(branch)

    # With no arguments, complete against the empty prefix (i.e. list all).
    prefixes = args or [b'']
    matched = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matched)))
    ui.write(b'\n')
2424 2424
2425 2425
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        unfiltered = repo.unfiltered()
        changelog = unfiltered.changelog
        # Prefer the index's own serializer when the implementation
        # provides one; fall back to the generic one otherwise.
        if util.safehasattr(changelog.index, "nodemap_data_all"):
            blob = changelog.index.nodemap_data_all()
        else:
            blob = nodemap.persistent_data(changelog.index)
        ui.write(blob)
    elif opts['dump_disk']:
        unfiltered = repo.unfiltered()
        changelog = unfiltered.changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfiltered = repo.unfiltered()
        changelog = unfiltered.changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, changelog.index, data)
    elif opts['metadata']:
        unfiltered = repo.unfiltered()
        changelog = unfiltered.changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            # Dump the docket fields describing the on-disk data file.
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2487 2487
2488 2488
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full-length hex node id without resolving it locally:
        # markers may reference changesets this repo does not have.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    if opts.get(b'delete'):
        # --delete mode: remove the markers at the given integer indices
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker precursor -> successors
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # parents can only be recorded for changesets we have
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2638 2638
2639 2639
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the requested revision (working parent when --rev is absent)
    # and print one "source -> destination" line per recorded copy.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    for dest, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2652 2652
2653 2653
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Function was previously (mis)named debugp1copies, which shadowed the
    # real debugp1copies defined just above at module level; the command
    # registration (b'debugp2copies') is unaffected by the rename.

    opts = pycompat.byteskwargs(opts)
    # Resolve the revision to inspect (working parent when --rev is absent).
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One "source -> destination" line per copy recorded against p2.
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2666 2666
2667 2667
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) from the dirstate extending `path` whose
        # dirstate status letter is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # reject specs that escape the repository
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # make the spec relative to the repository root
        spec = spec[len(rootdir) :]
        # dirstate always stores paths with '/' separators
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, only complete up to the next separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the flags;
    # no flags means accept everything ('nmar', below)
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2736 2736
2737 2737
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then print every copy found between them
    # (restricted by the file patterns, if any) as "source -> destination".
    srcctx = scmutil.revsingle(repo, rev1)
    dstctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(srcctx, pats, opts)
    copymap = copies.pathcopies(srcctx, dstctx, matcher)
    for dest, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2751 2751
2752 2752
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always enabled here; the extra output only
    # shows up when --debug is in effect.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }
    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        # always release the connection, even if a query above failed
        peer.close()
2776 2776
2777 2777
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool maps onto ui.forcemerge, the highest-priority source
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # under -v, echo the inputs that can short-circuit pattern matching
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # silence _picktool's own output unless --debug was given
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2862 2862
2863 2863
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # listing mode: dump every key/value pair in the namespace
            for key, value in sorted(
                pycompat.iteritems(target.listkeys(namespace))
            ):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
            return
        # update mode: atomically replace `old` with `new` on the peer
        key, old, new = keyinfo
        with target.commandexecutor() as executor:
            outcome = executor.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(outcome) + b'\n')
        # shell convention: 0 means the pushkey succeeded
        return not outcome
    finally:
        target.close()
2899 2899
2900 2900
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display and compare the pvecs (parent vectors) of two revisions

    Prints both vectors, their depths, and the delta, hamming distance,
    distance and relation (=, >, <, | or ?) between them.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Start from an explicit "unknown" marker so `rel` is always bound;
    # previously, if none of the comparisons below held, the final
    # ui.write() raised UnboundLocalError.
    rel = b"?"
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2927 2927
2928 2928
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None tells rebuild() to reset everything; --minimal narrows the
        # set down to files whose tracked state disagrees between the
        # manifest and the dirstate (see command doc).
        changedfiles = None
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            only_in_manifest = in_manifest - in_dirstate
            only_in_dirstate = in_dirstate - in_manifest
            not_added = {
                f
                for f in only_in_dirstate
                if not dirstate.get_entry(f).added
            }
            changedfiles = only_in_manifest | not_added

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2976 2976
2977 2977
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # convert str keys to bytes before looking up the flag
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
2994 2994
2995 2995
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a falsy value
        copysource = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not copysource:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            srcpath, srcnode = copysource
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, srcpath, hex(srcnode))
            )
3015 3015
3016 3016
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement per line, in sorted order
    for requirement in sorted(repo.requirements):
        ui.write(requirement + b"\n")
3022 3022
3023 3023
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # dump mode: print one line of raw index data per revision and exit
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # no delta parent: the revision is its own base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # maintain the running set of head revisions seen so far
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # statistics mode: walk every revision once and aggregate
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into a [min, max, total] accumulator
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full snapshot (depth 0), or empty
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # stored as a delta: extend the parent's chain bookkeeping
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # classify the delta base: prev, p1, p2, or other
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the stored chunk identifies its compression
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # turn the "total" slots into averages for display
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # format-string builders: width is derived from the largest value so
    # the columns line up
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # return (value, percentage-of-total) for the pcfmtstr templates
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # human-friendly label for a chunk's compression marker byte
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        # size breakdowns are only meaningful for revlog formats > 0
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3378 3378
3379 3379
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # only the two historic index layouts are supported
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # all node ids have the same printed width; sample the first one
        idlen = len(shortfn(r.node(i)))
        break

    # header line: columns depend on the format and on --verbose
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if lookup fails
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers, not node ids
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3493 3493
3494 3494
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # the successive transformation stages a revset expression goes
    # through; each entry is (stage name, transformation function)
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # which stages to print: always, or only when the tree changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, keeping each intermediate tree for later use
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and compare
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # mismatch: emit a unified-diff-style comparison of the two lists
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # normal path: evaluate the (possibly optimized) tree and print revs
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3626 3626
3627 3627
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # optional handle to record the raw server I/O for debugging
    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # serve the repository over the process's stdio until disconnect
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3676 3676
3677 3677
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of these people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # a missing REV2 defaults to the null revision (no second parent)
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3705 3705
3706 3706
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir, the first positional argument is actually the
    # revision, not a file path.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # attribute the usage error to this command, not debugdata
            # (the previous b'debugdata' name was a copy-paste leftover)
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # unwrap to the underlying revlog when the storage object wraps one
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3733 3733
3734 3734
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        # with no explicit SOURCE, fall back to the repo's default path
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # NOTE(review): ssl.wrap_socket is deprecated since Python 3.7 and
    # removed in 3.12; this likely needs migrating to
    # ssl.SSLContext().wrap_socket() — confirm against supported Pythons.
    # Verification is deliberately disabled: we only want the raw peer
    # certificate in order to inspect/complete its chain.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # DER-encoded peer certificate (binary form)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first check without building; only trigger the Windows Update
        # fetch when the chain is incomplete
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3806 3806
3807 3807
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect strip backups, most recently modified first, so recent strips
    # are listed (and searched for --recover) before older ones.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize log options that do not apply when reading from bundle
    # files before handing opts to the log machinery.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from chlist, honouring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the changeset is already present locally.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # A bundle may be unreadable when a revision it is based on is
            # missing from the local repository; warn and move on.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the chatter from the incoming/bundle-repo machinery.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the bundle inside a single lock + transaction so the
                # unbundle is atomic.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # Only the first bundle containing the node is used.
                        break
            else:
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    # Compact one-line-per-changeset template by default.
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3948 3948
3949 3949
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source, pinned revision) recorded
    # in the requested changeset, sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3961 3961
3962 3962
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose only the ui and (possibly None) repo to the interactive session.
    code.interact(local={'ui': ui, 'repo': repo})
3978 3978
3979 3979
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Cache shared across successorssets() calls for the whole command run.
    cache = {}
    closest = opts['closest']
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=closest, cache=cache
        ):
            # Each successors set is one indented, space-separated line;
            # an empty set (pruned changeset) prints as a blank line.
            if succsset:
                ui.write(b' ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
4034 4034
4035 4035
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        # Distinguish the three cache states: no entry at all, an entry
        # that could not be read, and a usable filenode.
        if fnode is None:
            shown = b'missing'
        elif not fnode:
            shown = b'invalid'
        else:
            shown = hex(fnode)
            if not flog.hasnode(fnode):
                shown += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (rev, hex(node), shown))
4054 4054
4055 4055
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Resolving revisions needs a repository even though the command is
        # otherwise usable without one (optionalrepo=True).
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    props = {}
    for d in opts['define']:
        # A missing '=' (tuple-unpacking failure) and an empty or reserved
        # key both raise ValueError and funnel into the same Abort.
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree, and the tree again after templatealias
        # expansion when the expansion actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render exactly once with the -D definitions.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4119 4119
4120 4120
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() can yield None; substitute a printable placeholder so the
    # output line below always renders.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4135 4135
4136 4136
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever ui.prompt() returned, for testing prompt handling.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4149 4149
4150 4150
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock so updatecaches()
    # can safely rewrite any cache file it maintains.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4156 4156
4157 4157
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # `optimize` arrives as a list (the -o flag is repeatable); deduplicate
    # it before delegating the real work to the upgrade module.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4207 4207
4208 4208
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Normalize path separators for display only when ui.slash asks for it
    # on platforms where the native separator is not '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Size the two path columns to their longest entries.  Generator
    # expressions avoid building throwaway lists, and `path` does not
    # shadow the builtin `abs` as the previous name did.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            display(repo.pathto(path)),
            m.exact(path) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4235 4235
4236 4236
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Render any divergent nodes as "hex (phase)" pairs followed by a
        # trailing separator space; empty when there are none.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            formatted = b' '.join(
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            )
            dnodes = formatted + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4254 4254
4255 4255
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Strip the generic remote options; whatever remains and carries a
        # value is forwarded as a wire-protocol argument.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        first = repo.debugwireargs(*vals, **args)
        second = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        repo.close()
4286 4286
4287 4287
4288 4288 def _parsewirelangblocks(fh):
4289 4289 activeaction = None
4290 4290 blocklines = []
4291 4291 lastindent = 0
4292 4292
4293 4293 for line in fh:
4294 4294 line = line.rstrip()
4295 4295 if not line:
4296 4296 continue
4297 4297
4298 4298 if line.startswith(b'#'):
4299 4299 continue
4300 4300
4301 4301 if not line.startswith(b' '):
4302 4302 # New block. Flush previous one.
4303 4303 if activeaction:
4304 4304 yield activeaction, blocklines
4305 4305
4306 4306 activeaction = line
4307 4307 blocklines = []
4308 4308 lastindent = 0
4309 4309 continue
4310 4310
4311 4311 # Else we start with an indent.
4312 4312
4313 4313 if not activeaction:
4314 4314 raise error.Abort(_(b'indented line outside of block'))
4315 4315
4316 4316 indent = len(line) - len(line.lstrip())
4317 4317
4318 4318 # If this line is indented more than the last line, concatenate it.
4319 4319 if indent > lastindent and blocklines:
4320 4320 blocklines[-1] += line.lstrip()
4321 4321 else:
4322 4322 blocklines.append(line)
4323 4323 lastindent = indent
4324 4324
4325 4325 # Flush last block.
4326 4326 if activeaction:
4327 4327 yield activeaction, blocklines
4328 4328
4329 4329
4330 4330 @command(
4331 4331 b'debugwireproto',
4332 4332 [
4333 4333 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4334 4334 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4335 4335 (
4336 4336 b'',
4337 4337 b'noreadstderr',
4338 4338 False,
4339 4339 _(b'do not read from stderr of the remote'),
4340 4340 ),
4341 4341 (
4342 4342 b'',
4343 4343 b'nologhandshake',
4344 4344 False,
4345 4345 _(b'do not log I/O related to the peer handshake'),
4346 4346 ),
4347 4347 ]
4348 4348 + cmdutil.remoteopts,
4349 4349 _(b'[PATH]'),
4350 4350 optionalrepo=True,
4351 4351 )
4352 4352 def debugwireproto(ui, repo, path=None, **opts):
4353 4353 """send wire protocol commands to a server
4354 4354
4355 4355 This command can be used to issue wire protocol commands to remote
4356 4356 peers and to debug the raw data being exchanged.
4357 4357
4358 4358 ``--localssh`` will start an SSH server against the current repository
4359 4359 and connect to that. By default, the connection will perform a handshake
4360 4360 and establish an appropriate peer instance.
4361 4361
4362 4362 ``--peer`` can be used to bypass the handshake protocol and construct a
4363 4363 peer instance using the specified class type. Valid values are ``raw``,
4364 4364 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4365 4365 don't support higher-level command actions.
4366 4366
4367 4367 ``--noreadstderr`` can be used to disable automatic reading from stderr
4368 4368 of the peer (for SSH connections only). Disabling automatic reading of
4369 4369 stderr is useful for making output more deterministic.
4370 4370
4371 4371 Commands are issued via a mini language which is specified via stdin.
4372 4372 The language consists of individual actions to perform. An action is
4373 4373 defined by a block. A block is defined as a line with no leading
4374 4374 space followed by 0 or more lines with leading space. Blocks are
4375 4375 effectively a high-level command with additional metadata.
4376 4376
4377 4377 Lines beginning with ``#`` are ignored.
4378 4378
4379 4379 The following sections denote available actions.
4380 4380
4381 4381 raw
4382 4382 ---
4383 4383
4384 4384 Send raw data to the server.
4385 4385
4386 4386 The block payload contains the raw data to send as one atomic send
4387 4387 operation. The data may not actually be delivered in a single system
4388 4388 call: it depends on the abilities of the transport being used.
4389 4389
4390 4390 Each line in the block is de-indented and concatenated. Then, that
4391 4391 value is evaluated as a Python b'' literal. This allows the use of
4392 4392 backslash escaping, etc.
4393 4393
4394 4394 raw+
4395 4395 ----
4396 4396
4397 4397 Behaves like ``raw`` except flushes output afterwards.
4398 4398
4399 4399 command <X>
4400 4400 -----------
4401 4401
4402 4402 Send a request to run a named command, whose name follows the ``command``
4403 4403 string.
4404 4404
4405 4405 Arguments to the command are defined as lines in this block. The format of
4406 4406 each line is ``<key> <value>``. e.g.::
4407 4407
4408 4408 command listkeys
4409 4409 namespace bookmarks
4410 4410
4411 4411 If the value begins with ``eval:``, it will be interpreted as a Python
4412 4412 literal expression. Otherwise values are interpreted as Python b'' literals.
4413 4413 This allows sending complex types and encoding special byte sequences via
4414 4414 backslash escaping.
4415 4415
4416 4416 The following arguments have special meaning:
4417 4417
4418 4418 ``PUSHFILE``
4419 4419 When defined, the *push* mechanism of the peer will be used instead
4420 4420 of the static request-response mechanism and the content of the
4421 4421 file specified in the value of this argument will be sent as the
4422 4422 command payload.
4423 4423
4424 4424 This can be used to submit a local bundle file to the remote.
4425 4425
4426 4426 batchbegin
4427 4427 ----------
4428 4428
4429 4429 Instruct the peer to begin a batched send.
4430 4430
4431 4431 All ``command`` blocks are queued for execution until the next
4432 4432 ``batchsubmit`` block.
4433 4433
4434 4434 batchsubmit
4435 4435 -----------
4436 4436
4437 4437 Submit previously queued ``command`` blocks as a batch request.
4438 4438
4439 4439 This action MUST be paired with a ``batchbegin`` action.
4440 4440
4441 4441 httprequest <method> <path>
4442 4442 ---------------------------
4443 4443
4444 4444 (HTTP peer only)
4445 4445
4446 4446 Send an HTTP request to the peer.
4447 4447
4448 4448 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4449 4449
4450 4450 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4451 4451 headers to add to the request. e.g. ``Accept: foo``.
4452 4452
4453 4453 The following arguments are special:
4454 4454
4455 4455 ``BODYFILE``
4456 4456 The content of the file defined as the value to this argument will be
4457 4457 transferred verbatim as the HTTP request body.
4458 4458
4459 4459 ``frame <type> <flags> <payload>``
4460 4460 Send a unified protocol frame as part of the request body.
4461 4461
4462 4462 All frames will be collected and sent as the body to the HTTP
4463 4463 request.
4464 4464
4465 4465 close
4466 4466 -----
4467 4467
4468 4468 Close the connection to the server.
4469 4469
4470 4470 flush
4471 4471 -----
4472 4472
4473 4473 Flush data written to the server.
4474 4474
4475 4475 readavailable
4476 4476 -------------
4477 4477
4478 4478 Close the write end of the connection and read all available data from
4479 4479 the server.
4480 4480
4481 4481 If the connection to the server encompasses multiple pipes, we poll both
4482 4482 pipes and read available data.
4483 4483
4484 4484 readline
4485 4485 --------
4486 4486
4487 4487 Read a line of output from the server. If there are multiple output
4488 4488 pipes, reads only the main pipe.
4489 4489
4490 4490 ereadline
4491 4491 ---------
4492 4492
4493 4493 Like ``readline``, but read from the stderr pipe, if available.
4494 4494
4495 4495 read <X>
4496 4496 --------
4497 4497
4498 4498 ``read()`` N bytes from the server's main output pipe.
4499 4499
4500 4500 eread <X>
4501 4501 ---------
4502 4502
4503 4503 ``read()`` N bytes from the server's stderr pipe, if available.
4504 4504
4505 4505 Specifying Unified Frame-Based Protocol Frames
4506 4506 ----------------------------------------------
4507 4507
4508 4508 It is possible to emit a *Unified Frame-Based Protocol* by using special
4509 4509 syntax.
4510 4510
4511 4511 A frame is composed as a type, flags, and payload. These can be parsed
4512 4512 from a string of the form:
4513 4513
4514 4514 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4515 4515
4516 4516 ``request-id`` and ``stream-id`` are integers defining the request and
4517 4517 stream identifiers.
4518 4518
4519 4519 ``type`` can be an integer value for the frame type or the string name
4520 4520 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4521 4521 ``command-name``.
4522 4522
4523 4523 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4524 4524 components. Each component (and there can be just one) can be an integer
4525 4525 or a flag name for stream flags or frame flags, respectively. Values are
4526 4526 resolved to integers and then bitwise OR'd together.
4527 4527
4528 4528 ``payload`` represents the raw frame payload. If it begins with
4529 4529 ``cbor:``, the following string is evaluated as Python code and the
4530 4530 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4531 4531 as a Python byte string literal.
4532 4532 """
4533 4533 opts = pycompat.byteskwargs(opts)
4534 4534
4535 4535 if opts[b'localssh'] and not repo:
4536 4536 raise error.Abort(_(b'--localssh requires a repository'))
4537 4537
4538 4538 if opts[b'peer'] and opts[b'peer'] not in (
4539 4539 b'raw',
4540 4540 b'ssh1',
4541 4541 ):
4542 4542 raise error.Abort(
4543 4543 _(b'invalid value for --peer'),
4544 4544 hint=_(b'valid values are "raw" and "ssh1"'),
4545 4545 )
4546 4546
4547 4547 if path and opts[b'localssh']:
4548 4548 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4549 4549
4550 4550 if ui.interactive():
4551 4551 ui.write(_(b'(waiting for commands on stdin)\n'))
4552 4552
4553 4553 blocks = list(_parsewirelangblocks(ui.fin))
4554 4554
4555 4555 proc = None
4556 4556 stdin = None
4557 4557 stdout = None
4558 4558 stderr = None
4559 4559 opener = None
4560 4560
4561 4561 if opts[b'localssh']:
4562 4562 # We start the SSH server in its own process so there is process
4563 4563 # separation. This prevents a whole class of potential bugs around
4564 4564 # shared state from interfering with server operation.
4565 4565 args = procutil.hgcmd() + [
4566 4566 b'-R',
4567 4567 repo.root,
4568 4568 b'debugserve',
4569 4569 b'--sshstdio',
4570 4570 ]
4571 4571 proc = subprocess.Popen(
4572 4572 pycompat.rapply(procutil.tonativestr, args),
4573 4573 stdin=subprocess.PIPE,
4574 4574 stdout=subprocess.PIPE,
4575 4575 stderr=subprocess.PIPE,
4576 4576 bufsize=0,
4577 4577 )
4578 4578
4579 4579 stdin = proc.stdin
4580 4580 stdout = proc.stdout
4581 4581 stderr = proc.stderr
4582 4582
4583 4583 # We turn the pipes into observers so we can log I/O.
4584 4584 if ui.verbose or opts[b'peer'] == b'raw':
4585 4585 stdin = util.makeloggingfileobject(
4586 4586 ui, proc.stdin, b'i', logdata=True
4587 4587 )
4588 4588 stdout = util.makeloggingfileobject(
4589 4589 ui, proc.stdout, b'o', logdata=True
4590 4590 )
4591 4591 stderr = util.makeloggingfileobject(
4592 4592 ui, proc.stderr, b'e', logdata=True
4593 4593 )
4594 4594
4595 4595 # --localssh also implies the peer connection settings.
4596 4596
4597 4597 url = b'ssh://localserver'
4598 4598 autoreadstderr = not opts[b'noreadstderr']
4599 4599
4600 4600 if opts[b'peer'] == b'ssh1':
4601 4601 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4602 4602 peer = sshpeer.sshv1peer(
4603 4603 ui,
4604 4604 url,
4605 4605 proc,
4606 4606 stdin,
4607 4607 stdout,
4608 4608 stderr,
4609 4609 None,
4610 4610 autoreadstderr=autoreadstderr,
4611 4611 )
4612 4612 elif opts[b'peer'] == b'raw':
4613 4613 ui.write(_(b'using raw connection to peer\n'))
4614 4614 peer = None
4615 4615 else:
4616 4616 ui.write(_(b'creating ssh peer from handshake results\n'))
4617 4617 peer = sshpeer.makepeer(
4618 4618 ui,
4619 4619 url,
4620 4620 proc,
4621 4621 stdin,
4622 4622 stdout,
4623 4623 stderr,
4624 4624 autoreadstderr=autoreadstderr,
4625 4625 )
4626 4626
    elif path:
        # HTTP peer: we bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = urlutil.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        # Opener kwargs use native str keys because they are expanded as
        # **kwargs below.
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        # --debug additionally logs the high-level read()/write() calls.
        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'raw':
            # Raw HTTP mode: requests are issued via ``httprequest``
            # actions instead of through a peer object.
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))
4677 4677
    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            # Write raw bytes to the server; ``raw+`` also flushes.
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            # Send a wire protocol command (or queue it when batching).
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                # ``eval:`` values are Python literals; everything else
                # is an escaped bytestring.
                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            # Inside batchbegin/batchsubmit, queue instead of sending now.
            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                # PUSHFILE streams a local file as the command's payload.
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                    ui.status(
                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                    )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                ui.status(
                    _(b'response: %s\n')
                    % stringutil.pprint(res, bprefix=True, indent=2)
                )
4749 4749
        elif action == b'batchbegin':
            # Start queueing subsequent ``command`` actions for one batch.
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None
4770 4770
4771 4771 elif action.startswith(b'httprequest '):
4772 4772 if not opener:
4773 4773 raise error.Abort(
4774 4774 _(b'cannot use httprequest without an HTTP peer')
4775 4775 )
4776 4776
4777 4777 request = action.split(b' ', 2)
4778 4778 if len(request) != 3:
4779 4779 raise error.Abort(
4780 4780 _(
4781 4781 b'invalid httprequest: expected format is '
4782 4782 b'"httprequest <method> <path>'
4783 4783 )
4784 4784 )
4785 4785
4786 4786 method, httppath = request[1:]
4787 4787 headers = {}
4788 4788 body = None
4789 4789 frames = []
4790 4790 for line in lines:
4791 4791 line = line.lstrip()
4792 4792 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4793 4793 if m:
4794 4794 # Headers need to use native strings.
4795 4795 key = pycompat.strurl(m.group(1))
4796 4796 value = pycompat.strurl(m.group(2))
4797 4797 headers[key] = value
4798 4798 continue
4799 4799
4800 4800 if line.startswith(b'BODYFILE '):
4801 4801 with open(line.split(b' ', 1), b'rb') as fh:
4802 4802 body = fh.read()
4803 4803 elif line.startswith(b'frame '):
4804 4804 frame = wireprotoframing.makeframefromhumanstring(
4805 4805 line[len(b'frame ') :]
4806 4806 )
4807 4807
4808 4808 frames.append(frame)
4809 4809 else:
4810 4810 raise error.Abort(
4811 4811 _(b'unknown argument to httprequest: %s') % line
4812 4812 )
4813 4813
            url = path + httppath

            # Any ``frame`` lines override a BODYFILE body.
            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                # Decode and pretty-print CBOR responses for test output.
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )
4842 4842
        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            # Close stdin then drain both stdout and stderr.
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            # ``read <n>``: consume exactly <n> bytes from stdout.
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            # ``eread <n>``: consume exactly <n> bytes from stderr.
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)
4876 4876
    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    # Tear down the peer and the local SSH server process, if any.
    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now