##// END OF EJS Templates
debuglock: make the command more useful in non-interactive mode...
marmoute -
r50092:883be4c7 default
parent child Browse files
Show More
@@ -1,4918 +1,4930 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 dirstateutils,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 vfs as vfsmod,
92 92 wireprotoframing,
93 93 wireprotoserver,
94 94 )
95 95 from .interfaces import repository
96 96 from .utils import (
97 97 cborutil,
98 98 compression,
99 99 dateutil,
100 100 procutil,
101 101 stringutil,
102 102 urlutil,
103 103 )
104 104
105 105 from .revlogutils import (
106 106 deltas as deltautil,
107 107 nodemap,
108 108 rewrite,
109 109 sidedata,
110 110 )
111 111
# Short alias kept for historical callers of this module.
release = lockmod.release

# Command table for this module.  It is seeded with the commands registered
# by the strip module so those are exposed alongside the debug* commands.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
117 117
118 118
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # An explicit index file was given; open it directly, no repo needed.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif len(args) == 2:
        # No index file: fall back to the changelog of the current repo.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rlog.rev(ancestor), hex(ancestor)))
138 138
139 139
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout this module; the path literal was
    # previously a str, inconsistent with every other vfs call here.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
155 155
156 156
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
163 163
164 164
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass is only used to size the progress bar)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second parse pass does the real work, under both repo locks and a
    # single transaction so a failure leaves no partial history behind.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the last node committed (-1: none yet)
        atbranch = b'default'
        nodeids = []  # commit nodes indexed by DAG id, for backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                # 'n' event: create one commit with parents 'ps'
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the "mf" file of the parents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag the line owned by this rev so later merges conflict
                    # predictably
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every rev
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # one brand-new file per rev; merges also carry over the
                    # "nf*" files of the second parent
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve content from 'filecontent'
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # resolve DAG backrefs to actual commit nodes
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # 'l' event: record a local tag for node 'id'
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # 'a' event: switch the named branch for subsequent commits
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
350 350
351 351
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of a changegroup unbundler 'gen'

    With ``all`` set, every delta of every group is listed with node,
    parents, linked cset, delta base and delta length; otherwise only the
    changelog node hashes are printed.  ``indent`` prefixes each output
    line, which lets _debugbundle2 nest this output under a part header.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one group's deltas under the heading 'named'
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # The stream must be consumed in order: changelog group, manifest
        # group, then one group per file (filelogheader returns {} at the
        # end, which terminates the iter()).
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        # terse mode: only the changelog nodes
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
391 391
392 392
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # we cannot decode this encoding version; report and bail out
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, m)
        fm.end()
415 415
416 416
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
425 425
426 426
def _quasirepr(thing):
    """return a stable, bytes repr-like rendering of 'thing'

    Mapping types are rendered with sorted keys so output does not depend
    on insertion order.
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
433 433
434 434
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when any type was requested
        if wanted and part.type not in wanted:
            continue
        ui.write(
            (
                b'%s -- %s (mandatory: %r)\n'
                % (part.type, _quasirepr(part.params), part.mandatory)
            )
        )
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers' and not ui.quiet:
            _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads' and not ui.quiet:
            _debugphaseheads(ui, part, indent=4)
457 457
458 458
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundlespec, do not list contents
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        unbundler = exchange.readbundle(ui, f, bundlepath)
        if isinstance(unbundler, bundle2.unbundle20):
            return _debugbundle2(ui, unbundler, all=all, **opts)
        _debugchangegroup(ui, unbundler, all=all, **opts)
481 481
482 482
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        # always release the peer, even if capability queries fail
        peer.close()
502 502
503 503
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the pre-computed data from changelog sidedata, if present
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        template = b"%-8s %2s: %s, %s;\n"
        for f in sorted(files.touched):
            # first matching category wins; generic "touched" is the fallback
            for members, label in (
                (files.added, b"added"),
                (files.removed, b"removed"),
                (files.merged, b"merged"),
                (files.salvaged, b"salvaged"),
            ):
                if f in members:
                    action = label
                    break
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            ui.write(template % (action, copy_parent, f, copy_source))
553 553
554 554
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    # warn about every inconsistency, then abort if there was any
    errcount = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errcount += 1
    if errcount:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
568 568
569 569
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if not opts.get('style'):
        return _debugdisplaycolor(ui)
    return _debugdisplaystyle(ui)
582 582
583 583
def _debugdisplaycolor(ui):
    # work on a copy so the real ui's style table is untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[len(b'color.') :]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[len(b'terminfo.') :]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        name, label = item
        return (b'_' in name, name, label)

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
600 600
601 601
def _debugdisplaystyle(ui):
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad every label to the widest one so the effect columns line up
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
615 615
616 616
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # named 'bundle_reqs' (not 'requirements') to avoid shadowing the
    # module-level 'requirements' import
    bundle_reqs, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(bundle_reqs)))
638 638
639 639
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog index: emit its DAG, labeling the listed revs "rN"
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) for each rev, plus a label event
            # for every rev explicitly listed on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each tagged rev to its tag names for label events below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an annotation event whenever the branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
709 709
710 710
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # with -c/-m/--dir the single positional argument is the revision,
    # not a file path
    if any(opts.get(k) for k in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
726 726
727 727
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended enables the larger set of accepted date formats
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
746 746
747 747
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used here: e[1] compressed size,
        # e[2] uncompressed size, e[3] delta base; with generaldelta,
        # e[5]/e[6] are compared against the base to classify the delta
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, deltas are always against the previous
            # revision unless this rev is itself a full snapshot
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # NOTE(review): the column headers below are restored to upstream's
    # aligned spacing; the scraped source had whitespace runs collapsed.
    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # delta chains are numbered in the order their base is first seen
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # guard the ratios against division by zero
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of the chain to measure how much data
            # would actually be fetched from disk
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
928 928
929 929
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 metadata file instead of entries
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # the deprecated --nodates flag overrides --dates when explicitly set
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename); filename breaks mtime ties
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # NOTE(review): the 'unset'/'set' literals are padded to the width
        # of the strftime output so columns align (the scraped source had
        # the padding collapsed).
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlink: show 'lnk' instead of an octal permission triplet
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1017 1017
1018 1018
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # the ignore-pattern hash occupies the trailing bytes of the
        # dirstate-v2 tree metadata blob
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1033 1033
1034 1034
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)))

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual (possibly local) peer

        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # use the local repository itself as the remote, restricted to the
        # ancestors of --remote-as-revs through a dedicated repoview filter
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # similarly restrict the local side to the ancestors of
        # --local-as-revs
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` is filled by the discovery implementations (audit=data) and by
    # this function; it ends up in the formatter output
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: capture what discovery writes to the ui
        # and expose it as a field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1289 1289
1290 1290
1291 1291 _chunksize = 4 << 10
1292 1292
1293 1293
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The content is written to the file named by ``--output`` when given,
    to the ui otherwise.
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # stream in fixed-size chunks to bound memory use
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # previously the response object was leaked on every code path;
        # always release it, even if opening the output file fails
        fh.close()
1316 1316
1317 1317
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; oxidized builds have no __file__
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # default verbosity: annotate the name with a compatibility note
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
            fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1379 1379
1380 1380
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # processing pipeline: (stage name, transform) applied in order
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # which intermediate trees to dump
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the expression through each stage, dumping requested trees
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names to test the matcher against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1476 1476
1477 1477
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # producing a report is exclusive with consuming one or dry-running
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    # the issue6528 corruption only exists in revlogv1 stores
    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # delegate the actual detection/repair work to the rewrite module
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1550 1550
1551 1551
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # name-column width: longest variant name, at least the header's width
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes values print as-is; everything else renders as yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels highlighting mismatches between repo, config and default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1622 1622
1623 1623
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a truth value exactly like the `x and b'yes' or b'no'` idiom
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throw-away file; unreadable/unwritable
    # locations simply report '(unknown)'
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as tmp:
            casesensitive = yesno(util.fscasesensitive(tmp.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1646 1646
1647 1647
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # turn the hex node arguments into binary node lists
    # TODO: get desired bundlecaps from command line.
    kwargs = {'bundlecaps': None}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing compression names to on-disk bundle types
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1694 1694
1695 1695
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            # `ignored` is the path that actually matched (the file itself or
            # one of its parent directories); `ignoredata` the matching rule
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # not directly ignored: check the parent directories
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1744 1744
1745 1745
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in debug mode, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # probe the rendered node-id width from the first revision; an empty
    # store keeps the short-hash fallback width
    idlen = 12
    first = next(iter(store), None)
    if first is not None:
        idlen = len(shortfn(store.node(first)))

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1785 1785
1786 1786
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        # one edge per real parent; the null second parent is skipped
        p1, p2 = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1805 1805
1806 1806
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # poke the index so the (possibly native) implementation is loaded
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1816 1816
1817 1817
1818 1818 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1819 1819 def debuginstall(ui, **opts):
1820 1820 """test Mercurial installation
1821 1821
1822 1822 Returns 0 on success.
1823 1823 """
1824 1824 opts = pycompat.byteskwargs(opts)
1825 1825
1826 1826 problems = 0
1827 1827
1828 1828 fm = ui.formatter(b'debuginstall', opts)
1829 1829 fm.startitem()
1830 1830
1831 1831 # encoding might be unknown or wrong. don't translate these messages.
1832 1832 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1833 1833 err = None
1834 1834 try:
1835 1835 codecs.lookup(pycompat.sysstr(encoding.encoding))
1836 1836 except LookupError as inst:
1837 1837 err = stringutil.forcebytestr(inst)
1838 1838 problems += 1
1839 1839 fm.condwrite(
1840 1840 err,
1841 1841 b'encodingerror',
1842 1842 b" %s\n (check that your locale is properly set)\n",
1843 1843 err,
1844 1844 )
1845 1845
1846 1846 # Python
1847 1847 pythonlib = None
1848 1848 if util.safehasattr(os, '__file__'):
1849 1849 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1850 1850 elif getattr(sys, 'oxidized', False):
1851 1851 pythonlib = pycompat.sysexecutable
1852 1852
1853 1853 fm.write(
1854 1854 b'pythonexe',
1855 1855 _(b"checking Python executable (%s)\n"),
1856 1856 pycompat.sysexecutable or _(b"unknown"),
1857 1857 )
1858 1858 fm.write(
1859 1859 b'pythonimplementation',
1860 1860 _(b"checking Python implementation (%s)\n"),
1861 1861 pycompat.sysbytes(platform.python_implementation()),
1862 1862 )
1863 1863 fm.write(
1864 1864 b'pythonver',
1865 1865 _(b"checking Python version (%s)\n"),
1866 1866 (b"%d.%d.%d" % sys.version_info[:3]),
1867 1867 )
1868 1868 fm.write(
1869 1869 b'pythonlib',
1870 1870 _(b"checking Python lib (%s)...\n"),
1871 1871 pythonlib or _(b"unknown"),
1872 1872 )
1873 1873
1874 1874 try:
1875 1875 from . import rustext # pytype: disable=import-error
1876 1876
1877 1877 rustext.__doc__ # trigger lazy import
1878 1878 except ImportError:
1879 1879 rustext = None
1880 1880
1881 1881 security = set(sslutil.supportedprotocols)
1882 1882 if sslutil.hassni:
1883 1883 security.add(b'sni')
1884 1884
1885 1885 fm.write(
1886 1886 b'pythonsecurity',
1887 1887 _(b"checking Python security support (%s)\n"),
1888 1888 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1889 1889 )
1890 1890
1891 1891 # These are warnings, not errors. So don't increment problem count. This
1892 1892 # may change in the future.
1893 1893 if b'tls1.2' not in security:
1894 1894 fm.plain(
1895 1895 _(
1896 1896 b' TLS 1.2 not supported by Python install; '
1897 1897 b'network connections lack modern security\n'
1898 1898 )
1899 1899 )
1900 1900 if b'sni' not in security:
1901 1901 fm.plain(
1902 1902 _(
1903 1903 b' SNI not supported by Python install; may have '
1904 1904 b'connectivity issues with some servers\n'
1905 1905 )
1906 1906 )
1907 1907
1908 1908 fm.plain(
1909 1909 _(
1910 1910 b"checking Rust extensions (%s)\n"
1911 1911 % (b'missing' if rustext is None else b'installed')
1912 1912 ),
1913 1913 )
1914 1914
1915 1915 # TODO print CA cert info
1916 1916
1917 1917 # hg version
1918 1918 hgver = util.version()
1919 1919 fm.write(
1920 1920 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1921 1921 )
1922 1922 fm.write(
1923 1923 b'hgverextra',
1924 1924 _(b"checking Mercurial custom build (%s)\n"),
1925 1925 b'+'.join(hgver.split(b'+')[1:]),
1926 1926 )
1927 1927
1928 1928 # compiled modules
1929 1929 hgmodules = None
1930 1930 if util.safehasattr(sys.modules[__name__], '__file__'):
1931 1931 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1932 1932 elif getattr(sys, 'oxidized', False):
1933 1933 hgmodules = pycompat.sysexecutable
1934 1934
1935 1935 fm.write(
1936 1936 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1937 1937 )
1938 1938 fm.write(
1939 1939 b'hgmodules',
1940 1940 _(b"checking installed modules (%s)...\n"),
1941 1941 hgmodules or _(b"unknown"),
1942 1942 )
1943 1943
1944 1944 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1945 1945 rustext = rustandc # for now, that's the only case
1946 1946 cext = policy.policy in (b'c', b'allow') or rustandc
1947 1947 nopure = cext or rustext
1948 1948 if nopure:
1949 1949 err = None
1950 1950 try:
1951 1951 if cext:
1952 1952 from .cext import ( # pytype: disable=import-error
1953 1953 base85,
1954 1954 bdiff,
1955 1955 mpatch,
1956 1956 osutil,
1957 1957 )
1958 1958
1959 1959 # quiet pyflakes
1960 1960 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1961 1961 if rustext:
1962 1962 from .rustext import ( # pytype: disable=import-error
1963 1963 ancestor,
1964 1964 dirstate,
1965 1965 )
1966 1966
1967 1967 dir(ancestor), dir(dirstate) # quiet pyflakes
1968 1968 except Exception as inst:
1969 1969 err = stringutil.forcebytestr(inst)
1970 1970 problems += 1
1971 1971 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1972 1972
1973 1973 compengines = util.compengines._engines.values()
1974 1974 fm.write(
1975 1975 b'compengines',
1976 1976 _(b'checking registered compression engines (%s)\n'),
1977 1977 fm.formatlist(
1978 1978 sorted(e.name() for e in compengines),
1979 1979 name=b'compengine',
1980 1980 fmt=b'%s',
1981 1981 sep=b', ',
1982 1982 ),
1983 1983 )
1984 1984 fm.write(
1985 1985 b'compenginesavail',
1986 1986 _(b'checking available compression engines (%s)\n'),
1987 1987 fm.formatlist(
1988 1988 sorted(e.name() for e in compengines if e.available()),
1989 1989 name=b'compengine',
1990 1990 fmt=b'%s',
1991 1991 sep=b', ',
1992 1992 ),
1993 1993 )
1994 1994 wirecompengines = compression.compengines.supportedwireengines(
1995 1995 compression.SERVERROLE
1996 1996 )
1997 1997 fm.write(
1998 1998 b'compenginesserver',
1999 1999 _(
2000 2000 b'checking available compression engines '
2001 2001 b'for wire protocol (%s)\n'
2002 2002 ),
2003 2003 fm.formatlist(
2004 2004 [e.name() for e in wirecompengines if e.wireprotosupport()],
2005 2005 name=b'compengine',
2006 2006 fmt=b'%s',
2007 2007 sep=b', ',
2008 2008 ),
2009 2009 )
2010 2010 re2 = b'missing'
2011 2011 if util._re2:
2012 2012 re2 = b'available'
2013 2013 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
2014 2014 fm.data(re2=bool(util._re2))
2015 2015
2016 2016 # templates
2017 2017 p = templater.templatedir()
2018 2018 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
2019 2019 fm.condwrite(not p, b'', _(b" no template directories found\n"))
2020 2020 if p:
2021 2021 (m, fp) = templater.try_open_template(b"map-cmdline.default")
2022 2022 if m:
2023 2023 # template found, check if it is working
2024 2024 err = None
2025 2025 try:
2026 2026 templater.templater.frommapfile(m)
2027 2027 except Exception as inst:
2028 2028 err = stringutil.forcebytestr(inst)
2029 2029 p = None
2030 2030 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
2031 2031 else:
2032 2032 p = None
2033 2033 fm.condwrite(
2034 2034 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
2035 2035 )
2036 2036 fm.condwrite(
2037 2037 not m,
2038 2038 b'defaulttemplatenotfound',
2039 2039 _(b" template '%s' not found\n"),
2040 2040 b"default",
2041 2041 )
2042 2042 if not p:
2043 2043 problems += 1
2044 2044 fm.condwrite(
2045 2045 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2046 2046 )
2047 2047
2048 2048 # editor
2049 2049 editor = ui.geteditor()
2050 2050 editor = util.expandpath(editor)
2051 2051 editorbin = procutil.shellsplit(editor)[0]
2052 2052 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2053 2053 cmdpath = procutil.findexe(editorbin)
2054 2054 fm.condwrite(
2055 2055 not cmdpath and editor == b'vi',
2056 2056 b'vinotfound',
2057 2057 _(
2058 2058 b" No commit editor set and can't find %s in PATH\n"
2059 2059 b" (specify a commit editor in your configuration"
2060 2060 b" file)\n"
2061 2061 ),
2062 2062 not cmdpath and editor == b'vi' and editorbin,
2063 2063 )
2064 2064 fm.condwrite(
2065 2065 not cmdpath and editor != b'vi',
2066 2066 b'editornotfound',
2067 2067 _(
2068 2068 b" Can't find editor '%s' in PATH\n"
2069 2069 b" (specify a commit editor in your configuration"
2070 2070 b" file)\n"
2071 2071 ),
2072 2072 not cmdpath and editorbin,
2073 2073 )
2074 2074 if not cmdpath and editor != b'vi':
2075 2075 problems += 1
2076 2076
2077 2077 # check username
2078 2078 username = None
2079 2079 err = None
2080 2080 try:
2081 2081 username = ui.username()
2082 2082 except error.Abort as e:
2083 2083 err = e.message
2084 2084 problems += 1
2085 2085
2086 2086 fm.condwrite(
2087 2087 username, b'username', _(b"checking username (%s)\n"), username
2088 2088 )
2089 2089 fm.condwrite(
2090 2090 err,
2091 2091 b'usernameerror',
2092 2092 _(
2093 2093 b"checking username...\n %s\n"
2094 2094 b" (specify a username in your configuration file)\n"
2095 2095 ),
2096 2096 err,
2097 2097 )
2098 2098
2099 2099 for name, mod in extensions.extensions():
2100 2100 handler = getattr(mod, 'debuginstall', None)
2101 2101 if handler is not None:
2102 2102 problems += handler(ui, fm)
2103 2103
2104 2104 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2105 2105 if not problems:
2106 2106 fm.data(problems=problems)
2107 2107 fm.condwrite(
2108 2108 problems,
2109 2109 b'problems',
2110 2110 _(b"%d problems detected, please check your install!\n"),
2111 2111 problems,
2112 2112 )
2113 2113 fm.end()
2114 2114
2115 2115 return problems
2116 2116
2117 2117
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # Connect to the target repository as a peer so this also works on
    # remote paths.
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    known = peer.known([bin(node_id) for node_id in ids])
    # Emit one '0'/'1' digit per queried node, in the order given.
    digits = b"".join(b"1" if flag else b"0" for flag in known)
    ui.write(b"%s\n" % digits)
2131 2131
2132 2132
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin shim kept only for old completion scripts: delegate straight to
    # debugnamecomplete, which supersedes this command.
    debugnamecomplete(ui, repo, *args)
2137 2137
2138 2138
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-free mode: blindly delete the lock files and stop.  tryunlink
    # ignores a missing file, so this is a no-op when no lock exists.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # wait=False: fail immediately instead of blocking on a
                # lock someone else holds
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    # Interactive session: hold the lock(s) until the user
                    # acknowledges the prompt.
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    # Non-interactive session: hold the lock(s) until a
                    # signal (e.g. SIGINT) interrupts the sleep loop.
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # Always drop any locks acquired above, even on abort.
        release(*locks)

    # No modifying option given: report the current lock state.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished, i.e. the lock is free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2250 2262
2251 2263
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def _fulltextcache():
        # Not every revlog implementation exposes the cache attribute.
        storage = repo.manifestlog.getstorage(b'')
        try:
            return storage._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            _fulltextcache().clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            manifestlog = repo.manifestlog
            store = manifestlog.getstorage(b'')
            for node in add:
                try:
                    entry = manifestlog[store.lookup(node)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                # reading the manifest populates the fulltext cache
                entry.read()
        return

    # Neither --clear nor --add: dump the cache contents.
    cache = _fulltextcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
        return
    ui.write(
        _(
            b'cache contains %d manifest entries, in order of most to '
            b'least recent:\n'
        )
        % (len(cache),)
    )
    totalsize = 0
    for nodeid in cache:
        # peek() reads without reordering the LRU while we report
        blob = cache.peek(nodeid)
        entrysize = len(blob)
        totalsize += entrysize + 24  # 20 bytes nodeid, 4 bytes size
        ui.write(
            _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(entrysize))
        )
    ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
    ui.write(
        _(b'total cache data size %s, on-disk %s\n')
        % (util.bytecount(totalsize), util.bytecount(ondisk))
    )
2325 2337
2326 2338
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    # With --verbose, first report which on-disk record format (v1/v2)
    # actually backs the merge state.
    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    # Default template renders commits, per-file state, and extras in a
    # human-readable form; users can override it via -T.
    if not opts[b'template']:
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two sides of the merge (local/other), with optional labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file merge records; the state tuple layout differs between
    # content conflicts and path (rename/delete) conflicts.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level extras: entries for files not present in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed it's extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2434 2446
2435 2447
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather every known name except branches, which are handled
    # separately below so that only *open* branches are offered.
    candidates = set()
    for namespace, ns in repo.names.items():
        if namespace == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # No argument means "complete everything" (empty prefix).
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2458 2470
2459 2471
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Serialize a fresh nodemap from the (unfiltered) changelog index.
        changelog = repo.unfiltered().changelog
        if util.safehasattr(changelog.index, "nodemap_data_all"):
            blob = changelog.index.nodemap_data_all()
        else:
            blob = nodemap.persistent_data(changelog.index)
        ui.write(blob)
    elif opts['dump_disk']:
        # Emit the raw persisted nodemap bytes, if any exist on disk.
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the persisted data against the live index.
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, changelog.index, data)
    elif opts['metadata']:
        # Print the docket (metadata) describing the persisted nodemap.
        changelog = repo.unfiltered().changelog
        nm_data = nodemap.persisted_data(changelog)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2521 2533
2522 2534
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id, without requiring it to exist locally.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete: remove markers by their index in the obsstore.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record one marker precursor -> successors.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Manual lock/transaction management: the transaction must be
        # released before the lock, hence the nested try/finally.
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2672 2684
2673 2685
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    bopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, bopts.get(b'rev'), default=None)
    # One "source -> destination" line per copy recorded against p1.
    for dest, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2686 2698
2687 2699
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Bug fix: this function used to be (re)defined as ``debugp1copies``,
    # which shadowed the real debugp1copies at module level. The command is
    # registered by its byte-string name (b'debugp2copies'), so renaming the
    # def is invisible to command dispatch and restores the correct
    # module-level binding for debugp1copies.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # One "source -> destination" line per copy recorded against p2.
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2700 2712
2701 2713
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def matches_for(path, acceptable):
        # Normalize one FILESPEC to a root-relative path (using b'/' as the
        # separator, as the dirstate does) and collect matching entries.
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        for filename, entry in repo.dirstate.items():
            if not (filename.startswith(spec) and entry.state in acceptable):
                continue
            if fixpaths:
                filename = filename.replace(b'/', pycompat.ossep)
            if fullpaths:
                files.add(filename)
                continue
            # Without --full, stop at the next path separator so we offer
            # directory-level completions.
            sep_at = filename.find(pycompat.ossep, speclen)
            if sep_at >= 0:
                dirs.add(filename[:sep_at])
            else:
                files.add(filename)
        return files, dirs

    # Build the set of acceptable dirstate states from the flags; with no
    # flag every state is acceptable.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = matches_for(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2770 2782
2771 2783
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then print every copy detected between them,
    # restricted by the given file patterns.
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    copy_map = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for dst, src in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2785 2797
2786 2798
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always enabled here; it only becomes visible
    # when the user also passes --debug.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

    def yesno(flag):
        return _(b'yes') if flag else _(b'no')

    try:
        # Query the peer before writing, keeping the original call order.
        is_local = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(is_local))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
    finally:
        peer.close()
2810 2822
2811 2823
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool has the highest precedence; apply it as a config override so
    # the regular tool-selection machinery sees it.
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # Report (at -v) the other configuration sources that can decide the
        # tool before merge-patterns matching even happens.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Silence _picktool()'s own messages unless --debug was given,
            # since they can be very noisy per file.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2896 2908
2897 2909
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # listing mode: dump every key/value pair in the namespace
            for key, value in sorted(peer.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(key), stringutil.escapestr(value))
                )
        else:
            # update mode: compare-and-swap the key from old to new
            key, old, new = keyinfo
            args = {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            }
            with peer.commandexecutor() as executor:
                outcome = executor.callcommand(b'pushkey', args).result()

            ui.status(pycompat.bytestr(outcome) + b'\n')
            return not outcome
    finally:
        peer.close()
2933 2945
2934 2946
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors ("pvec") of two revisions

    Prints both vectors, their depths, and the relation computed
    between them: ``=`` (equal), ``>``/``<`` (ancestor ordering) or
    ``|`` (incomparable).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously `rel` was left unbound when none of the comparisons
        # matched, crashing with a NameError below.  Report the relation
        # as unknown instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2961 2973
2962 2974
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None tells rebuild() to reset every file.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # files tracked by the manifest but missing from the dirstate
            manifestonly = manifestfiles - dirstatefiles
            # files known to the dirstate but absent from the manifest;
            # keep deliberate adds out of the rebuild set
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3010 3022
3011 3023
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # delegate the actual work to the repair module
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3028 3040
3029 3041
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # filelog().renamed() yields (source path, source node) or a
        # false value when the file was not renamed in this revision
        renamed = fctx.filelog().renamed(fctx.filenode())
        display = repo.pathto(path)
        if renamed:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (display, renamed[0], hex(renamed[1]))
            )
        else:
            ui.write(_(b"%s not renamed\n") % display)
3049 3061
3050 3062
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # one requirement per line, sorted for stable output
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3056 3068
3057 3069
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With -d/--dump, print one line of raw index data per revision
    instead of the aggregated statistics.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # raw dump mode: one row per revision, then return
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            # a delta parent of -1 means no delta; use the rev itself as base
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # track current DAG heads incrementally: drop this rev's
            # parents, add the rev itself
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # statistics mode: first decode the revlog format flags
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into a [min, max, total] accumulator
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # main accumulation pass over every revision
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full text (or empty)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # stored as a delta; extend the parent's chain data
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # turn the [min, max, total] totals into averages
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # width-aligned integer format
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # width-aligned integer format followed by a percentage
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair; 100% when total is zero
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # human-readable label for a chunk's one-byte compression marker
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3412 3424
3413 3425
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full hashes; otherwise the abbreviated form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # measure the width of one rendered node id for column alignment
        idlen = len(shortfn(r.node(i)))
        break

    # column headers vary with the requested format and verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            # format 0 reports parents as node ids
            try:
                pp = r.parents(node)
            except Exception:
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # format 1 reports parents as revision numbers plus flags
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3527 3539
3528 3540
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # the revset compilation pipeline: each stage transforms the tree
    # produced by the previous one
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # which stage trees get printed: always, or only when they changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the expression through every stage, printing requested trees
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff results
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # results differ: show a unified-diff-style comparison, exit 1
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3660 3672
3661 3673
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    byteopts = pycompat.byteskwargs(opts)

    if not byteopts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logiofd = byteopts[b'logiofd']
    logiofile = byteopts[b'logiofile']
    if logiofd and logiofile:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if logiofd:
        # Line buffering would be ideal, but binary-mode line buffering
        # isn't supported (and warns on Python 3.8+). Unbuffered output
        # could hurt performance, but this code isn't performance
        # critical, so that is acceptable.
        fd = int(logiofd)
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # a pipe can't seek, so append mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif logiofile:
        logfh = open(logiofile, b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3710 3722
3711 3723
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only used if you are one of the few people that
    deeply understand both conversion tools and file level histories. If you
    are reading this help, you are not one of those people (most of them sailed
    west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # second parent defaults to the null revision when REV2 is omitted
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3739 3751
3740 3752
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    byteopts = pycompat.byteskwargs(opts)
    wantsstorage = (
        byteopts.get(b'changelog')
        or byteopts.get(b'manifest')
        or byteopts.get(b'dir')
    )
    if wantsstorage:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        # with -c/-m/--dir the positional FILE argument is the revision
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, byteopts)
    # unwrap to the underlying revlog when the storage object wraps one
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3767 3779
3768 3780
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # The deprecated module-level ssl.wrap_socket() was removed in Python
    # 3.12; build an SSLContext with equivalent settings instead (any TLS
    # version, no certificate verification -- we only want the peer's cert,
    # which is fetched below regardless of validity).
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # DER-encoded peer certificate, as expected by the win32 helpers.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3840 3852
3841 3853
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every strip-backup bundle file, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # Reuse the log option handling; bundle/force are required by
    # bundlerepo.getremotechanges() below.
    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do when the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent revision we don't have; skip it.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Suppress the peer's own chatter while extracting the incoming
        # changesets from the bundle.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle containing the requested changeset
                # inside a single lock/transaction, then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: header with the bundle's mtime, then either
                # the bundle path (--verbose) or a one-line-per-cset summary.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always drop the temporary bundle repo created by
            # getremotechanges().
            cleanupfn()
3982 3994
3983 3995
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the subrepo state (path, source URL, pinned revision) recorded
    # for the requested revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for subpath, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % subpath)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3995 4007
3996 4008
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose the ui and repo objects to the interactive session.
    code.interact(local={'ui': ui, 'repo': repo})
4012 4024
4013 4025
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Cache shared across all successorssets() calls to avoid recomputation.
    cache = {}
    closest = opts['closest']
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=closest, cache=cache
        ):
            # One indented line per successors set; empty sets still get a
            # newline so every set is visible.
            line = b''.join(b' ' + short(node) for node in succsset)
            ui.write(line + b'\n')
4068 4080
4069 4081
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        if fnode is None:
            # No cached .hgtags filenode for this changeset.
            shown = b'missing'
        elif fnode:
            shown = hex(fnode)
            if not flog.hasnode(fnode):
                shown += b' (unknown node)'
        else:
            # Cached entry is neither absent nor a usable node.
            shown = b'invalid'

        ui.write(b'%d %s %s\n' % (rev, hex(node), shown))
4088 4100
4089 4101
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties; 'ui' is
    # reserved and an empty key is invalid.
    props = {}
    for definition in opts['define']:
        try:
            key, val = (part.strip() for part in definition.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
            props[key] = val
        except ValueError:
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )

    if ui.verbose:
        # Dump the parsed tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        expanded = templater.expandaliases(tree, aliases)
        if expanded != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(expanded), b'\n'
            )

    def showsymbols(t):
        # List the default keywords/functions referenced by the template.
        kwds, funcs = t.symbolsuseddefault()
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once with the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4153 4165
4154 4166
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    # ui.getpass() can return None (e.g. on EOF); show a placeholder then.
    response = ui.getpass(prompt)
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4169 4181
4170 4182
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever the user typed at the prompt.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4183 4195
4184 4196
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the wlock and the store lock while refreshing every cache.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4190 4202
4191 4203
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before delegating to the
    # upgrade machinery, which does all the real work.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4241 4253
4242 4254
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    matched = list(repo[None].walk(m))
    if not matched:
        return

    # Optionally normalize path separators for display (ui.slash on
    # platforms where the OS separator is not '/').
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        topath = lambda fn: util.normpath(fn)
    else:
        topath = lambda fn: fn

    # Size the columns to the longest repo-relative and cwd-relative paths
    # so the output lines up.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(p) for p in matched),
        max(len(repo.pathto(p)) for p in matched),
    )
    for p in matched:
        line = fmt % (
            p,
            topath(repo.pathto(p)),
            m.exact(p) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4269 4281
4270 4282
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent changeset as "<hex> (<phase>)",
            # space-separated and followed by a trailing space.
            formatted = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr()) for ctx in divergent
            ]
            dnodes = b' '.join(formatted) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4288 4300
4289 4301
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the "debugwireargs" wire protocol command against a peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; only set values are forwarded
        # as wire arguments.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = repo.debugwireargs(*vals, **args)
        second = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        repo.close()
4320 4332
4321 4333
4322 4334 def _parsewirelangblocks(fh):
4323 4335 activeaction = None
4324 4336 blocklines = []
4325 4337 lastindent = 0
4326 4338
4327 4339 for line in fh:
4328 4340 line = line.rstrip()
4329 4341 if not line:
4330 4342 continue
4331 4343
4332 4344 if line.startswith(b'#'):
4333 4345 continue
4334 4346
4335 4347 if not line.startswith(b' '):
4336 4348 # New block. Flush previous one.
4337 4349 if activeaction:
4338 4350 yield activeaction, blocklines
4339 4351
4340 4352 activeaction = line
4341 4353 blocklines = []
4342 4354 lastindent = 0
4343 4355 continue
4344 4356
4345 4357 # Else we start with an indent.
4346 4358
4347 4359 if not activeaction:
4348 4360 raise error.Abort(_(b'indented line outside of block'))
4349 4361
4350 4362 indent = len(line) - len(line.lstrip())
4351 4363
4352 4364 # If this line is indented more than the last line, concatenate it.
4353 4365 if indent > lastindent and blocklines:
4354 4366 blocklines[-1] += line.lstrip()
4355 4367 else:
4356 4368 blocklines.append(line)
4357 4369 lastindent = indent
4358 4370
4359 4371 # Flush last block.
4360 4372 if activeaction:
4361 4373 yield activeaction, blocklines
4362 4374
4363 4375
4364 4376 @command(
4365 4377 b'debugwireproto',
4366 4378 [
4367 4379 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4368 4380 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4369 4381 (
4370 4382 b'',
4371 4383 b'noreadstderr',
4372 4384 False,
4373 4385 _(b'do not read from stderr of the remote'),
4374 4386 ),
4375 4387 (
4376 4388 b'',
4377 4389 b'nologhandshake',
4378 4390 False,
4379 4391 _(b'do not log I/O related to the peer handshake'),
4380 4392 ),
4381 4393 ]
4382 4394 + cmdutil.remoteopts,
4383 4395 _(b'[PATH]'),
4384 4396 optionalrepo=True,
4385 4397 )
4386 4398 def debugwireproto(ui, repo, path=None, **opts):
4387 4399 """send wire protocol commands to a server
4388 4400
4389 4401 This command can be used to issue wire protocol commands to remote
4390 4402 peers and to debug the raw data being exchanged.
4391 4403
4392 4404 ``--localssh`` will start an SSH server against the current repository
4393 4405 and connect to that. By default, the connection will perform a handshake
4394 4406 and establish an appropriate peer instance.
4395 4407
4396 4408 ``--peer`` can be used to bypass the handshake protocol and construct a
4397 4409 peer instance using the specified class type. Valid values are ``raw``,
4398 4410 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4399 4411 don't support higher-level command actions.
4400 4412
4401 4413 ``--noreadstderr`` can be used to disable automatic reading from stderr
4402 4414 of the peer (for SSH connections only). Disabling automatic reading of
4403 4415 stderr is useful for making output more deterministic.
4404 4416
4405 4417 Commands are issued via a mini language which is specified via stdin.
4406 4418 The language consists of individual actions to perform. An action is
4407 4419 defined by a block. A block is defined as a line with no leading
4408 4420 space followed by 0 or more lines with leading space. Blocks are
4409 4421 effectively a high-level command with additional metadata.
4410 4422
4411 4423 Lines beginning with ``#`` are ignored.
4412 4424
4413 4425 The following sections denote available actions.
4414 4426
4415 4427 raw
4416 4428 ---
4417 4429
4418 4430 Send raw data to the server.
4419 4431
4420 4432 The block payload contains the raw data to send as one atomic send
4421 4433 operation. The data may not actually be delivered in a single system
4422 4434 call: it depends on the abilities of the transport being used.
4423 4435
4424 4436 Each line in the block is de-indented and concatenated. Then, that
4425 4437 value is evaluated as a Python b'' literal. This allows the use of
4426 4438 backslash escaping, etc.
4427 4439
4428 4440 raw+
4429 4441 ----
4430 4442
4431 4443 Behaves like ``raw`` except flushes output afterwards.
4432 4444
4433 4445 command <X>
4434 4446 -----------
4435 4447
4436 4448 Send a request to run a named command, whose name follows the ``command``
4437 4449 string.
4438 4450
4439 4451 Arguments to the command are defined as lines in this block. The format of
4440 4452 each line is ``<key> <value>``. e.g.::
4441 4453
4442 4454 command listkeys
4443 4455 namespace bookmarks
4444 4456
4445 4457 If the value begins with ``eval:``, it will be interpreted as a Python
4446 4458 literal expression. Otherwise values are interpreted as Python b'' literals.
4447 4459 This allows sending complex types and encoding special byte sequences via
4448 4460 backslash escaping.
4449 4461
4450 4462 The following arguments have special meaning:
4451 4463
4452 4464 ``PUSHFILE``
4453 4465 When defined, the *push* mechanism of the peer will be used instead
4454 4466 of the static request-response mechanism and the content of the
4455 4467 file specified in the value of this argument will be sent as the
4456 4468 command payload.
4457 4469
4458 4470 This can be used to submit a local bundle file to the remote.
4459 4471
4460 4472 batchbegin
4461 4473 ----------
4462 4474
4463 4475 Instruct the peer to begin a batched send.
4464 4476
4465 4477 All ``command`` blocks are queued for execution until the next
4466 4478 ``batchsubmit`` block.
4467 4479
4468 4480 batchsubmit
4469 4481 -----------
4470 4482
4471 4483 Submit previously queued ``command`` blocks as a batch request.
4472 4484
4473 4485 This action MUST be paired with a ``batchbegin`` action.
4474 4486
4475 4487 httprequest <method> <path>
4476 4488 ---------------------------
4477 4489
4478 4490 (HTTP peer only)
4479 4491
4480 4492 Send an HTTP request to the peer.
4481 4493
4482 4494 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4483 4495
4484 4496 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4485 4497 headers to add to the request. e.g. ``Accept: foo``.
4486 4498
4487 4499 The following arguments are special:
4488 4500
4489 4501 ``BODYFILE``
4490 4502 The content of the file defined as the value to this argument will be
4491 4503 transferred verbatim as the HTTP request body.
4492 4504
4493 4505 ``frame <type> <flags> <payload>``
4494 4506 Send a unified protocol frame as part of the request body.
4495 4507
4496 4508 All frames will be collected and sent as the body to the HTTP
4497 4509 request.
4498 4510
4499 4511 close
4500 4512 -----
4501 4513
4502 4514 Close the connection to the server.
4503 4515
4504 4516 flush
4505 4517 -----
4506 4518
4507 4519 Flush data written to the server.
4508 4520
4509 4521 readavailable
4510 4522 -------------
4511 4523
4512 4524 Close the write end of the connection and read all available data from
4513 4525 the server.
4514 4526
4515 4527 If the connection to the server encompasses multiple pipes, we poll both
4516 4528 pipes and read available data.
4517 4529
4518 4530 readline
4519 4531 --------
4520 4532
4521 4533 Read a line of output from the server. If there are multiple output
4522 4534 pipes, reads only the main pipe.
4523 4535
4524 4536 ereadline
4525 4537 ---------
4526 4538
4527 4539 Like ``readline``, but read from the stderr pipe, if available.
4528 4540
4529 4541 read <X>
4530 4542 --------
4531 4543
4532 4544 ``read()`` N bytes from the server's main output pipe.
4533 4545
4534 4546 eread <X>
4535 4547 ---------
4536 4548
4537 4549 ``read()`` N bytes from the server's stderr pipe, if available.
4538 4550
4539 4551 Specifying Unified Frame-Based Protocol Frames
4540 4552 ----------------------------------------------
4541 4553
4542 4554 It is possible to emit a *Unified Frame-Based Protocol* by using special
4543 4555 syntax.
4544 4556
4545 4557 A frame is composed as a type, flags, and payload. These can be parsed
4546 4558 from a string of the form:
4547 4559
4548 4560 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4549 4561
4550 4562 ``request-id`` and ``stream-id`` are integers defining the request and
4551 4563 stream identifiers.
4552 4564
4553 4565 ``type`` can be an integer value for the frame type or the string name
4554 4566 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4555 4567 ``command-name``.
4556 4568
4557 4569 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4558 4570 components. Each component (and there can be just one) can be an integer
4559 4571 or a flag name for stream flags or frame flags, respectively. Values are
4560 4572 resolved to integers and then bitwise OR'd together.
4561 4573
4562 4574 ``payload`` represents the raw frame payload. If it begins with
4563 4575 ``cbor:``, the following string is evaluated as Python code and the
4564 4576 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4565 4577 as a Python byte string literal.
4566 4578 """
4567 4579 opts = pycompat.byteskwargs(opts)
4568 4580
4569 4581 if opts[b'localssh'] and not repo:
4570 4582 raise error.Abort(_(b'--localssh requires a repository'))
4571 4583
4572 4584 if opts[b'peer'] and opts[b'peer'] not in (
4573 4585 b'raw',
4574 4586 b'ssh1',
4575 4587 ):
4576 4588 raise error.Abort(
4577 4589 _(b'invalid value for --peer'),
4578 4590 hint=_(b'valid values are "raw" and "ssh1"'),
4579 4591 )
4580 4592
4581 4593 if path and opts[b'localssh']:
4582 4594 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4583 4595
4584 4596 if ui.interactive():
4585 4597 ui.write(_(b'(waiting for commands on stdin)\n'))
4586 4598
4587 4599 blocks = list(_parsewirelangblocks(ui.fin))
4588 4600
4589 4601 proc = None
4590 4602 stdin = None
4591 4603 stdout = None
4592 4604 stderr = None
4593 4605 opener = None
4594 4606
4595 4607 if opts[b'localssh']:
4596 4608 # We start the SSH server in its own process so there is process
4597 4609 # separation. This prevents a whole class of potential bugs around
4598 4610 # shared state from interfering with server operation.
4599 4611 args = procutil.hgcmd() + [
4600 4612 b'-R',
4601 4613 repo.root,
4602 4614 b'debugserve',
4603 4615 b'--sshstdio',
4604 4616 ]
4605 4617 proc = subprocess.Popen(
4606 4618 pycompat.rapply(procutil.tonativestr, args),
4607 4619 stdin=subprocess.PIPE,
4608 4620 stdout=subprocess.PIPE,
4609 4621 stderr=subprocess.PIPE,
4610 4622 bufsize=0,
4611 4623 )
4612 4624
4613 4625 stdin = proc.stdin
4614 4626 stdout = proc.stdout
4615 4627 stderr = proc.stderr
4616 4628
4617 4629 # We turn the pipes into observers so we can log I/O.
4618 4630 if ui.verbose or opts[b'peer'] == b'raw':
4619 4631 stdin = util.makeloggingfileobject(
4620 4632 ui, proc.stdin, b'i', logdata=True
4621 4633 )
4622 4634 stdout = util.makeloggingfileobject(
4623 4635 ui, proc.stdout, b'o', logdata=True
4624 4636 )
4625 4637 stderr = util.makeloggingfileobject(
4626 4638 ui, proc.stderr, b'e', logdata=True
4627 4639 )
4628 4640
4629 4641 # --localssh also implies the peer connection settings.
4630 4642
4631 4643 url = b'ssh://localserver'
4632 4644 autoreadstderr = not opts[b'noreadstderr']
4633 4645
4634 4646 if opts[b'peer'] == b'ssh1':
4635 4647 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4636 4648 peer = sshpeer.sshv1peer(
4637 4649 ui,
4638 4650 url,
4639 4651 proc,
4640 4652 stdin,
4641 4653 stdout,
4642 4654 stderr,
4643 4655 None,
4644 4656 autoreadstderr=autoreadstderr,
4645 4657 )
4646 4658 elif opts[b'peer'] == b'raw':
4647 4659 ui.write(_(b'using raw connection to peer\n'))
4648 4660 peer = None
4649 4661 else:
4650 4662 ui.write(_(b'creating ssh peer from handshake results\n'))
4651 4663 peer = sshpeer.makepeer(
4652 4664 ui,
4653 4665 url,
4654 4666 proc,
4655 4667 stdin,
4656 4668 stdout,
4657 4669 stderr,
4658 4670 autoreadstderr=autoreadstderr,
4659 4671 )
4660 4672
4661 4673 elif path:
4662 4674 # We bypass hg.peer() so we can proxy the sockets.
4663 4675 # TODO consider not doing this because we skip
4664 4676 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4665 4677 u = urlutil.url(path)
4666 4678 if u.scheme != b'http':
4667 4679 raise error.Abort(_(b'only http:// paths are currently supported'))
4668 4680
4669 4681 url, authinfo = u.authinfo()
4670 4682 openerargs = {
4671 4683 'useragent': b'Mercurial debugwireproto',
4672 4684 }
4673 4685
4674 4686 # Turn pipes/sockets into observers so we can log I/O.
4675 4687 if ui.verbose:
4676 4688 openerargs.update(
4677 4689 {
4678 4690 'loggingfh': ui,
4679 4691 'loggingname': b's',
4680 4692 'loggingopts': {
4681 4693 'logdata': True,
4682 4694 'logdataapis': False,
4683 4695 },
4684 4696 }
4685 4697 )
4686 4698
4687 4699 if ui.debugflag:
4688 4700 openerargs['loggingopts']['logdataapis'] = True
4689 4701
4690 4702 # Don't send default headers when in raw mode. This allows us to
4691 4703 # bypass most of the behavior of our URL handling code so we can
4692 4704 # have near complete control over what's sent on the wire.
4693 4705 if opts[b'peer'] == b'raw':
4694 4706 openerargs['sendaccept'] = False
4695 4707
4696 4708 opener = urlmod.opener(ui, authinfo, **openerargs)
4697 4709
4698 4710 if opts[b'peer'] == b'raw':
4699 4711 ui.write(_(b'using raw connection to peer\n'))
4700 4712 peer = None
4701 4713 elif opts[b'peer']:
4702 4714 raise error.Abort(
4703 4715 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4704 4716 )
4705 4717 else:
4706 4718 peer = httppeer.makepeer(ui, path, opener=opener)
4707 4719
4708 4720 # We /could/ populate stdin/stdout with sock.makefile()...
4709 4721 else:
4710 4722 raise error.Abort(_(b'unsupported connection configuration'))
4711 4723
4712 4724 batchedcommands = None
4713 4725
4714 4726 # Now perform actions based on the parsed wire language instructions.
4715 4727 for action, lines in blocks:
4716 4728 if action in (b'raw', b'raw+'):
4717 4729 if not stdin:
4718 4730 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4719 4731
4720 4732 # Concatenate the data together.
4721 4733 data = b''.join(l.lstrip() for l in lines)
4722 4734 data = stringutil.unescapestr(data)
4723 4735 stdin.write(data)
4724 4736
4725 4737 if action == b'raw+':
4726 4738 stdin.flush()
4727 4739 elif action == b'flush':
4728 4740 if not stdin:
4729 4741 raise error.Abort(_(b'cannot call flush on this peer'))
4730 4742 stdin.flush()
4731 4743 elif action.startswith(b'command'):
4732 4744 if not peer:
4733 4745 raise error.Abort(
4734 4746 _(
4735 4747 b'cannot send commands unless peer instance '
4736 4748 b'is available'
4737 4749 )
4738 4750 )
4739 4751
4740 4752 command = action.split(b' ', 1)[1]
4741 4753
4742 4754 args = {}
4743 4755 for line in lines:
4744 4756 # We need to allow empty values.
4745 4757 fields = line.lstrip().split(b' ', 1)
4746 4758 if len(fields) == 1:
4747 4759 key = fields[0]
4748 4760 value = b''
4749 4761 else:
4750 4762 key, value = fields
4751 4763
4752 4764 if value.startswith(b'eval:'):
4753 4765 value = stringutil.evalpythonliteral(value[5:])
4754 4766 else:
4755 4767 value = stringutil.unescapestr(value)
4756 4768
4757 4769 args[key] = value
4758 4770
4759 4771 if batchedcommands is not None:
4760 4772 batchedcommands.append((command, args))
4761 4773 continue
4762 4774
4763 4775 ui.status(_(b'sending %s command\n') % command)
4764 4776
4765 4777 if b'PUSHFILE' in args:
4766 4778 with open(args[b'PUSHFILE'], 'rb') as fh:
4767 4779 del args[b'PUSHFILE']
4768 4780 res, output = peer._callpush(
4769 4781 command, fh, **pycompat.strkwargs(args)
4770 4782 )
4771 4783 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4772 4784 ui.status(
4773 4785 _(b'remote output: %s\n') % stringutil.escapestr(output)
4774 4786 )
4775 4787 else:
4776 4788 with peer.commandexecutor() as e:
4777 4789 res = e.callcommand(command, args).result()
4778 4790
4779 4791 ui.status(
4780 4792 _(b'response: %s\n')
4781 4793 % stringutil.pprint(res, bprefix=True, indent=2)
4782 4794 )
4783 4795
4784 4796 elif action == b'batchbegin':
4785 4797 if batchedcommands is not None:
4786 4798 raise error.Abort(_(b'nested batchbegin not allowed'))
4787 4799
4788 4800 batchedcommands = []
4789 4801 elif action == b'batchsubmit':
4790 4802 # There is a batching API we could go through. But it would be
4791 4803 # difficult to normalize requests into function calls. It is easier
4792 4804 # to bypass this layer and normalize to commands + args.
4793 4805 ui.status(
4794 4806 _(b'sending batch with %d sub-commands\n')
4795 4807 % len(batchedcommands)
4796 4808 )
4797 4809 assert peer is not None
4798 4810 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4799 4811 ui.status(
4800 4812 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4801 4813 )
4802 4814
4803 4815 batchedcommands = None
4804 4816
4805 4817 elif action.startswith(b'httprequest '):
4806 4818 if not opener:
4807 4819 raise error.Abort(
4808 4820 _(b'cannot use httprequest without an HTTP peer')
4809 4821 )
4810 4822
4811 4823 request = action.split(b' ', 2)
4812 4824 if len(request) != 3:
4813 4825 raise error.Abort(
4814 4826 _(
4815 4827 b'invalid httprequest: expected format is '
4816 4828 b'"httprequest <method> <path>'
4817 4829 )
4818 4830 )
4819 4831
4820 4832 method, httppath = request[1:]
4821 4833 headers = {}
4822 4834 body = None
4823 4835 frames = []
4824 4836 for line in lines:
4825 4837 line = line.lstrip()
4826 4838 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4827 4839 if m:
4828 4840 # Headers need to use native strings.
4829 4841 key = pycompat.strurl(m.group(1))
4830 4842 value = pycompat.strurl(m.group(2))
4831 4843 headers[key] = value
4832 4844 continue
4833 4845
4834 4846 if line.startswith(b'BODYFILE '):
4835 4847 with open(line.split(b' ', 1), b'rb') as fh:
4836 4848 body = fh.read()
4837 4849 elif line.startswith(b'frame '):
4838 4850 frame = wireprotoframing.makeframefromhumanstring(
4839 4851 line[len(b'frame ') :]
4840 4852 )
4841 4853
4842 4854 frames.append(frame)
4843 4855 else:
4844 4856 raise error.Abort(
4845 4857 _(b'unknown argument to httprequest: %s') % line
4846 4858 )
4847 4859
4848 4860 url = path + httppath
4849 4861
4850 4862 if frames:
4851 4863 body = b''.join(bytes(f) for f in frames)
4852 4864
4853 4865 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4854 4866
4855 4867 # urllib.Request insists on using has_data() as a proxy for
4856 4868 # determining the request method. Override that to use our
4857 4869 # explicitly requested method.
4858 4870 req.get_method = lambda: pycompat.sysstr(method)
4859 4871
4860 4872 try:
4861 4873 res = opener.open(req)
4862 4874 body = res.read()
4863 4875 except util.urlerr.urlerror as e:
4864 4876 # read() method must be called, but only exists in Python 2
4865 4877 getattr(e, 'read', lambda: None)()
4866 4878 continue
4867 4879
4868 4880 ct = res.headers.get('Content-Type')
4869 4881 if ct == 'application/mercurial-cbor':
4870 4882 ui.write(
4871 4883 _(b'cbor> %s\n')
4872 4884 % stringutil.pprint(
4873 4885 cborutil.decodeall(body), bprefix=True, indent=2
4874 4886 )
4875 4887 )
4876 4888
4877 4889 elif action == b'close':
4878 4890 assert peer is not None
4879 4891 peer.close()
4880 4892 elif action == b'readavailable':
4881 4893 if not stdout or not stderr:
4882 4894 raise error.Abort(
4883 4895 _(b'readavailable not available on this peer')
4884 4896 )
4885 4897
4886 4898 stdin.close()
4887 4899 stdout.read()
4888 4900 stderr.read()
4889 4901
4890 4902 elif action == b'readline':
4891 4903 if not stdout:
4892 4904 raise error.Abort(_(b'readline not available on this peer'))
4893 4905 stdout.readline()
4894 4906 elif action == b'ereadline':
4895 4907 if not stderr:
4896 4908 raise error.Abort(_(b'ereadline not available on this peer'))
4897 4909 stderr.readline()
4898 4910 elif action.startswith(b'read '):
4899 4911 count = int(action.split(b' ', 1)[1])
4900 4912 if not stdout:
4901 4913 raise error.Abort(_(b'read not available on this peer'))
4902 4914 stdout.read(count)
4903 4915 elif action.startswith(b'eread '):
4904 4916 count = int(action.split(b' ', 1)[1])
4905 4917 if not stderr:
4906 4918 raise error.Abort(_(b'eread not available on this peer'))
4907 4919 stderr.read(count)
4908 4920 else:
4909 4921 raise error.Abort(_(b'unknown action: %s') % action)
4910 4922
4911 4923 if batchedcommands is not None:
4912 4924 raise error.Abort(_(b'unclosed "batchbegin" request'))
4913 4925
4914 4926 if peer:
4915 4927 peer.close()
4916 4928
4917 4929 if proc:
4918 4930 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now