##// END OF EJS Templates
dirstate-item: use `added` in debugrebuilddirstate...
marmoute -
r48913:78e66649 default
parent child Browse files
Show More
@@ -1,4923 +1,4923 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import binascii
11 11 import codecs
12 12 import collections
13 13 import contextlib
14 14 import difflib
15 15 import errno
16 16 import glob
17 17 import operator
18 18 import os
19 19 import platform
20 20 import random
21 21 import re
22 22 import socket
23 23 import ssl
24 24 import stat
25 25 import string
26 26 import subprocess
27 27 import sys
28 28 import time
29 29
30 30 from .i18n import _
31 31 from .node import (
32 32 bin,
33 33 hex,
34 34 nullrev,
35 35 short,
36 36 )
37 37 from .pycompat import (
38 38 getattr,
39 39 open,
40 40 )
41 41 from . import (
42 42 bundle2,
43 43 bundlerepo,
44 44 changegroup,
45 45 cmdutil,
46 46 color,
47 47 context,
48 48 copies,
49 49 dagparser,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 vfs as vfsmod,
92 92 wireprotoframing,
93 93 wireprotoserver,
94 94 wireprotov2peer,
95 95 )
96 96 from .interfaces import repository
97 97 from .utils import (
98 98 cborutil,
99 99 compression,
100 100 dateutil,
101 101 procutil,
102 102 stringutil,
103 103 urlutil,
104 104 )
105 105
106 106 from .revlogutils import (
107 107 deltas as deltautil,
108 108 nodemap,
109 109 rewrite,
110 110 sidedata,
111 111 )
112 112
# Alias of lockmod.release, kept at module level for the debug commands below.
release = lockmod.release

# Command table for the debug* commands; seeded with the commands declared by
# the strip module so they are registered alongside the ones defined here.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
118 118
119 119
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open that revlog directly.
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the current repo.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rl.rev(ancestor), hex(ancestor)))
139 139
140 140
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Base85-armored copy of the EICAR test file. See
    # https://en.wikipedia.org/wiki/EICAR_test_file for details.
    payload = util.b85decode(
        b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
        b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
    )
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(payload)
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
156 156
157 157
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the given path, parse it as a bundle, and replay it onto the repo.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
164 164
165 165
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # The command only makes sense on a fresh repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, used for progress
    # reporting and for sizing the initial mergeable-file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the last node committed (-1: none yet)
        atbranch = b'default'
        nodeids = []  # commit node of each DAG id, indexed by id
        id = 0
        progress.update(id)
        # Second parse pass: actually create the commits.
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # "mf" carries content that merges cleanly: for a merge
                    # node run a 3-way merge of both parents' copies.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this node's dedicated line so each rev changes it.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is fully rewritten by every revision.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # One brand-new file per revision; merges also carry over
                    # the "nf*" files from the second parent.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve the content prepared above.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Map DAG backrefs to the commit nodes created earlier.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # Local tag: remember it in localtags format ("<node> <name>").
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # Branch annotation: applies to all subsequent nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
341 341
342 342
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup ``gen``

    With ``all`` set, every delta chunk of the changelog, manifest and
    filelog sub-streams is printed with its parents, linked cset and delta
    base; otherwise only the changelog node ids are printed.  ``indent``
    shifts the output right (used when nested inside bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one line per delta chunk of the current sub-stream.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # Sub-streams must be consumed in order: changelog, manifest, then
        # one filelog per file until filelogheader() returns an empty dict.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
382 382
383 383
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Marker format newer than this client understands: report and stop.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # Render each marker through the shared formatter used by
        # `hg debugobsolete` so the output styles match.
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
406 406
407 407
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in binary blob 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
416 416
417 417
def _quasirepr(thing):
    """Return a bytes repr of ``thing``; dict-likes render with sorted keys
    so the output is stable across runs."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
424 424
425 425
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only show parts whose type was requested.
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # For known part types, decode and dump the payload too (unless -q).
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
448 448
449 449
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only report the bundlespec, do not dump the contents.
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
472 472
473 473
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b'  %s\n' % cap)
        # Bundle2 capabilities are nested one level deeper.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        peer.close()
493 493
494 494
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the pre-computed block from the changelog sidedata, if any.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # First matching category wins; "touched" is the fallback.
            action = b"touched"
            for label, members in (
                (b"added", files.added),
                (b"removed", files.removed),
                (b"merged", files.merged),
                (b"salvaged", files.salvaged),
            ):
                if f in members:
                    action = label
                    break

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % (action, copy_parent, f, copy_source))
544 544
545 545
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    # verify() yields (format, args...) tuples, one per inconsistency.
    errors = 0
    for err in repo.dirstate.verify(m1, m2):
        ui.warn(err[0] % err[1:])
        errors += 1
    if errors:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
559 559
560 560
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
573 573
574 574
def _debugdisplaycolor(ui):
    """print every color/effect label, each rendered in its own style"""
    # Work on a copy so the caller's ui styles are left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
591 591
592 592
def _debugdisplaystyle(ui):
    """print each configured style label and the effects it maps to"""
    ui.write(_(b'available style:\n'))
    styles = ui._styles
    if not styles:
        return
    # Pad effect lists so they line up after the longest label.
    width = max(len(name) for name in styles)
    for label, effects in sorted(styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            padding = b' ' * (max(0, width - len(label)))
            rendered = b', '.join(ui.label(e, e) for e in effects.split())
            ui.write(b': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write(b'\n')
606 606
607 607
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        warning = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(warning)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
629 629
630 630
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Explicit index file: walk that revlog, labeling requested revs.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) nodes and ('l', (rev, label)) tags
            # in the event format dagparser.dagtextlines() expects.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged rev to its list of tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit a branch annotation whenever the branch changes.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
700 700
701 701
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    wholelog = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if wholelog:
        # With -c/-m/--dir the positional FILE slot actually holds REV.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
717 717
718 718
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # Also report whether the parsed timestamp falls within RANGE.
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
737 737
738 738
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Return (compsize, uncompsize, deltatype, chain, chainsize) for rev.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; classify it against the parents
            # (e[5]/e[6]), the previous rev, or the rev itself (full text).
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without general delta the base is either the rev itself
            # (full snapshot) or implicitly the previous revision.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    # Column header, aligned with the %7d/%10d/... formats written below.
    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chains are numbered in order of first appearance of their base rev.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain to measure I/O efficiency.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
919 919
920 920
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --dates=no; its mere presence
    # (any non-None value) forces dates off.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            # Padded to the 20-column width of the strftime output below
            # so columns stay aligned.
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # Symlink bit set: display 'lnk' instead of an octal mode.
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
975 975
976 976
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    # Only dirstate-v2 stores the ignore-pattern hash (in the docket's
    # tree metadata trailer); for v1 this command prints nothing.
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
991 991
992 992
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented configs are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grows

    * devel.discovery.randomize=True

      If False, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:

        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # --remote-as-revs: use the local repo, hidden behind a repoview
        # filter, as the "remote" side of the discovery.
        branches = (None, [])
        remote_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: same trick, applied to the local side.
        local_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # audit data filled in by the discovery implementation (round-trips, …)
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: there cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1246 1246
1247 1247
_chunksize = 4 << 10  # read/write streamed downloads in 4 KiB chunks
1249 1249
1250 1250
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is streamed in ``_chunksize`` pieces to the file named by
    ``--output`` when given, otherwise to the ui.
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            # chunked copy so arbitrarily large resources never have to be
            # held in memory at once
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # the original code never closed the source handle; release it even
        # when writing to the destination fails
        fh.close()
1273 1273
1274 1274
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    loaded = extensions.extensions(ui)
    hg_version = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # emit one formatter item per extension, ordered by extension name
    for name, module in sorted(loaded, key=operator.itemgetter(0)):
        internal = extensions.ismoduleinternal(module)
        source = None
        if util.safehasattr(module, '__file__'):
            source = pycompat.fsencode(module.__file__)
        elif getattr(sys, 'oxidized', False):
            source = pycompat.sysexecutable
        if internal:
            tested_with = []  # never expose magic string to users
        else:
            tested_with = getattr(module, 'testedwith', b'').split()
        bug_link = getattr(module, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', name)
        else:
            fm.write(b'name', b'%s', name)
            # annotate the name with a compatibility marker
            if internal or hg_version in tested_with:
                fm.plain(b'\n')
            elif not tested_with:
                fm.plain(_(b' (untested!)\n'))
            else:
                fm.plain(b' (%s!)\n' % tested_with[-1])

        fm.condwrite(
            ui.verbose and source,
            b'source',
            _(b' location: %s\n'),
            source or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][internal])
            fm.data(bundled=internal)

        fm.condwrite(
            ui.verbose and tested_with,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(tested_with, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and bug_link,
            b'buglink',
            _(b' bug reporting: %s\n'),
            bug_link or b"",
        )

    fm.end()
1336 1336
1337 1337
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # parsing pipeline: each stage transforms the tree of the previous one
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    # figure out which stages should have their tree printed
    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the bare --verbose form prints the tree without a stage header
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # finally, print the candidates selected by the matcher
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1433 1433
1434 1434
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that hasn't been written yet
    (as of 5.9rc0).

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # building a report and acting on one are mutually exclusive
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1507 1507
1508 1508
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: widest variant name, but at least as wide as the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad so that every value column lines up in plain output
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # strings pass through unchanged; other values render as yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so mismatches between repo, config and default stand out
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        # config and default columns only appear with --verbose
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1579 1579
1580 1580
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result the way this command always has
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probe with a throwaway file in the target directory
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1603 1603
1604 1604
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # TODO: get desired bundlecaps from command line.
    kwargs = {'bundlecaps': None}
    if common:
        kwargs['common'] = [bin(c) for c in common]
    if head:
        kwargs['heads'] = [bin(h) for h in head]
    bundle = peer.getbundle(b'debug', **kwargs)

    # translate the user-facing compression name into an on-disk bundle type
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1651 1651
1652 1652
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # check the file itself first, then each containing directory
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1701 1701
1702 1702
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full node ids with --debug, short ones otherwise
    shortfn = hex if ui.debugflag else short

    # width of a rendered node id: probe the first revision, default to 12
    idlen = 12
    for sample in store:
        idlen = len(shortfn(store.node(sample)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1742 1742
1743 1743
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)
        # one edge per parent; the null second parent is omitted
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1762 1762
1763 1763
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise the changelog index before querying its stats
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1773 1773
1774 1774
1775 1775 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1776 1776 def debuginstall(ui, **opts):
1777 1777 """test Mercurial installation
1778 1778
1779 1779 Returns 0 on success.
1780 1780 """
1781 1781 opts = pycompat.byteskwargs(opts)
1782 1782
1783 1783 problems = 0
1784 1784
1785 1785 fm = ui.formatter(b'debuginstall', opts)
1786 1786 fm.startitem()
1787 1787
1788 1788 # encoding might be unknown or wrong. don't translate these messages.
1789 1789 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1790 1790 err = None
1791 1791 try:
1792 1792 codecs.lookup(pycompat.sysstr(encoding.encoding))
1793 1793 except LookupError as inst:
1794 1794 err = stringutil.forcebytestr(inst)
1795 1795 problems += 1
1796 1796 fm.condwrite(
1797 1797 err,
1798 1798 b'encodingerror',
1799 1799 b" %s\n (check that your locale is properly set)\n",
1800 1800 err,
1801 1801 )
1802 1802
1803 1803 # Python
1804 1804 pythonlib = None
1805 1805 if util.safehasattr(os, '__file__'):
1806 1806 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1807 1807 elif getattr(sys, 'oxidized', False):
1808 1808 pythonlib = pycompat.sysexecutable
1809 1809
1810 1810 fm.write(
1811 1811 b'pythonexe',
1812 1812 _(b"checking Python executable (%s)\n"),
1813 1813 pycompat.sysexecutable or _(b"unknown"),
1814 1814 )
1815 1815 fm.write(
1816 1816 b'pythonimplementation',
1817 1817 _(b"checking Python implementation (%s)\n"),
1818 1818 pycompat.sysbytes(platform.python_implementation()),
1819 1819 )
1820 1820 fm.write(
1821 1821 b'pythonver',
1822 1822 _(b"checking Python version (%s)\n"),
1823 1823 (b"%d.%d.%d" % sys.version_info[:3]),
1824 1824 )
1825 1825 fm.write(
1826 1826 b'pythonlib',
1827 1827 _(b"checking Python lib (%s)...\n"),
1828 1828 pythonlib or _(b"unknown"),
1829 1829 )
1830 1830
1831 1831 try:
1832 1832 from . import rustext # pytype: disable=import-error
1833 1833
1834 1834 rustext.__doc__ # trigger lazy import
1835 1835 except ImportError:
1836 1836 rustext = None
1837 1837
1838 1838 security = set(sslutil.supportedprotocols)
1839 1839 if sslutil.hassni:
1840 1840 security.add(b'sni')
1841 1841
1842 1842 fm.write(
1843 1843 b'pythonsecurity',
1844 1844 _(b"checking Python security support (%s)\n"),
1845 1845 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1846 1846 )
1847 1847
1848 1848 # These are warnings, not errors. So don't increment problem count. This
1849 1849 # may change in the future.
1850 1850 if b'tls1.2' not in security:
1851 1851 fm.plain(
1852 1852 _(
1853 1853 b' TLS 1.2 not supported by Python install; '
1854 1854 b'network connections lack modern security\n'
1855 1855 )
1856 1856 )
1857 1857 if b'sni' not in security:
1858 1858 fm.plain(
1859 1859 _(
1860 1860 b' SNI not supported by Python install; may have '
1861 1861 b'connectivity issues with some servers\n'
1862 1862 )
1863 1863 )
1864 1864
1865 1865 fm.plain(
1866 1866 _(
1867 1867 b"checking Rust extensions (%s)\n"
1868 1868 % (b'missing' if rustext is None else b'installed')
1869 1869 ),
1870 1870 )
1871 1871
1872 1872 # TODO print CA cert info
1873 1873
1874 1874 # hg version
1875 1875 hgver = util.version()
1876 1876 fm.write(
1877 1877 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1878 1878 )
1879 1879 fm.write(
1880 1880 b'hgverextra',
1881 1881 _(b"checking Mercurial custom build (%s)\n"),
1882 1882 b'+'.join(hgver.split(b'+')[1:]),
1883 1883 )
1884 1884
1885 1885 # compiled modules
1886 1886 hgmodules = None
1887 1887 if util.safehasattr(sys.modules[__name__], '__file__'):
1888 1888 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1889 1889 elif getattr(sys, 'oxidized', False):
1890 1890 hgmodules = pycompat.sysexecutable
1891 1891
1892 1892 fm.write(
1893 1893 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1894 1894 )
1895 1895 fm.write(
1896 1896 b'hgmodules',
1897 1897 _(b"checking installed modules (%s)...\n"),
1898 1898 hgmodules or _(b"unknown"),
1899 1899 )
1900 1900
1901 1901 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1902 1902 rustext = rustandc # for now, that's the only case
1903 1903 cext = policy.policy in (b'c', b'allow') or rustandc
1904 1904 nopure = cext or rustext
1905 1905 if nopure:
1906 1906 err = None
1907 1907 try:
1908 1908 if cext:
1909 1909 from .cext import ( # pytype: disable=import-error
1910 1910 base85,
1911 1911 bdiff,
1912 1912 mpatch,
1913 1913 osutil,
1914 1914 )
1915 1915
1916 1916 # quiet pyflakes
1917 1917 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1918 1918 if rustext:
1919 1919 from .rustext import ( # pytype: disable=import-error
1920 1920 ancestor,
1921 1921 dirstate,
1922 1922 )
1923 1923
1924 1924 dir(ancestor), dir(dirstate) # quiet pyflakes
1925 1925 except Exception as inst:
1926 1926 err = stringutil.forcebytestr(inst)
1927 1927 problems += 1
1928 1928 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1929 1929
1930 1930 compengines = util.compengines._engines.values()
1931 1931 fm.write(
1932 1932 b'compengines',
1933 1933 _(b'checking registered compression engines (%s)\n'),
1934 1934 fm.formatlist(
1935 1935 sorted(e.name() for e in compengines),
1936 1936 name=b'compengine',
1937 1937 fmt=b'%s',
1938 1938 sep=b', ',
1939 1939 ),
1940 1940 )
1941 1941 fm.write(
1942 1942 b'compenginesavail',
1943 1943 _(b'checking available compression engines (%s)\n'),
1944 1944 fm.formatlist(
1945 1945 sorted(e.name() for e in compengines if e.available()),
1946 1946 name=b'compengine',
1947 1947 fmt=b'%s',
1948 1948 sep=b', ',
1949 1949 ),
1950 1950 )
1951 1951 wirecompengines = compression.compengines.supportedwireengines(
1952 1952 compression.SERVERROLE
1953 1953 )
1954 1954 fm.write(
1955 1955 b'compenginesserver',
1956 1956 _(
1957 1957 b'checking available compression engines '
1958 1958 b'for wire protocol (%s)\n'
1959 1959 ),
1960 1960 fm.formatlist(
1961 1961 [e.name() for e in wirecompengines if e.wireprotosupport()],
1962 1962 name=b'compengine',
1963 1963 fmt=b'%s',
1964 1964 sep=b', ',
1965 1965 ),
1966 1966 )
1967 1967 re2 = b'missing'
1968 1968 if util._re2:
1969 1969 re2 = b'available'
1970 1970 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1971 1971 fm.data(re2=bool(util._re2))
1972 1972
1973 1973 # templates
1974 1974 p = templater.templatedir()
1975 1975 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1976 1976 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1977 1977 if p:
1978 1978 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1979 1979 if m:
1980 1980 # template found, check if it is working
1981 1981 err = None
1982 1982 try:
1983 1983 templater.templater.frommapfile(m)
1984 1984 except Exception as inst:
1985 1985 err = stringutil.forcebytestr(inst)
1986 1986 p = None
1987 1987 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1988 1988 else:
1989 1989 p = None
1990 1990 fm.condwrite(
1991 1991 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1992 1992 )
1993 1993 fm.condwrite(
1994 1994 not m,
1995 1995 b'defaulttemplatenotfound',
1996 1996 _(b" template '%s' not found\n"),
1997 1997 b"default",
1998 1998 )
1999 1999 if not p:
2000 2000 problems += 1
2001 2001 fm.condwrite(
2002 2002 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
2003 2003 )
2004 2004
2005 2005 # editor
2006 2006 editor = ui.geteditor()
2007 2007 editor = util.expandpath(editor)
2008 2008 editorbin = procutil.shellsplit(editor)[0]
2009 2009 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
2010 2010 cmdpath = procutil.findexe(editorbin)
2011 2011 fm.condwrite(
2012 2012 not cmdpath and editor == b'vi',
2013 2013 b'vinotfound',
2014 2014 _(
2015 2015 b" No commit editor set and can't find %s in PATH\n"
2016 2016 b" (specify a commit editor in your configuration"
2017 2017 b" file)\n"
2018 2018 ),
2019 2019 not cmdpath and editor == b'vi' and editorbin,
2020 2020 )
2021 2021 fm.condwrite(
2022 2022 not cmdpath and editor != b'vi',
2023 2023 b'editornotfound',
2024 2024 _(
2025 2025 b" Can't find editor '%s' in PATH\n"
2026 2026 b" (specify a commit editor in your configuration"
2027 2027 b" file)\n"
2028 2028 ),
2029 2029 not cmdpath and editorbin,
2030 2030 )
2031 2031 if not cmdpath and editor != b'vi':
2032 2032 problems += 1
2033 2033
2034 2034 # check username
2035 2035 username = None
2036 2036 err = None
2037 2037 try:
2038 2038 username = ui.username()
2039 2039 except error.Abort as e:
2040 2040 err = e.message
2041 2041 problems += 1
2042 2042
2043 2043 fm.condwrite(
2044 2044 username, b'username', _(b"checking username (%s)\n"), username
2045 2045 )
2046 2046 fm.condwrite(
2047 2047 err,
2048 2048 b'usernameerror',
2049 2049 _(
2050 2050 b"checking username...\n %s\n"
2051 2051 b" (specify a username in your configuration file)\n"
2052 2052 ),
2053 2053 err,
2054 2054 )
2055 2055
2056 2056 for name, mod in extensions.extensions():
2057 2057 handler = getattr(mod, 'debuginstall', None)
2058 2058 if handler is not None:
2059 2059 problems += handler(ui, fm)
2060 2060
2061 2061 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
2062 2062 if not problems:
2063 2063 fm.data(problems=problems)
2064 2064 fm.condwrite(
2065 2065 problems,
2066 2066 b'problems',
2067 2067 _(b"%d problems detected, please check your install!\n"),
2068 2068 problems,
2069 2069 )
2070 2070 fm.end()
2071 2071
2072 2072 return problems
2073 2073
2074 2074
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # 'known' is a wire-protocol capability; bail out if the remote
        # cannot answer the query at all.
        if not repo.capable(b'known'):
            raise error.Abort(b"known() not supported by target repository")
        flags = repo.known([bin(s) for s in ids])
        ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
    finally:
        # Release the peer (ssh/http peers hold a subprocess or socket
        # open); mirrors the cleanup done by debugpeer and debugpushkey.
        repo.close()
2088 2088
2089 2089
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias kept only so that old shell-completion scripts invoking
    # `hg debuglabelcomplete` keep working; the actual implementation lives
    # in debugnamecomplete().
    debugnamecomplete(ui, repo, *args)
2094 2094
2095 2095
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-freeing bypasses the lock machinery entirely and simply deletes
    # the file backing the lock -- hence the DANGEROUS warning in the help.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # wlock(False): fail immediately instead of waiting if
                # another process already holds the lock.
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the acquired lock(s) until the user answers the prompt
            # (or the process is interrupted); finally releases them.
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # No modifying option was given: report current lock state.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could acquire it, so nobody holds it; release right away.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the failed
                # acquire and the stat: treat it as free.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2207 2207
2208 2208
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Abort cleanly when the active manifest storage has no fulltext
        # cache attribute (e.g. alternative revlog implementations).
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Mutating the cache requires the working-directory lock.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # Neither --clear nor --add: display the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2282 2282
2283 2283
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # Peek at the raw on-disk records to report which merge-state
        # format (v1/v2) will effectively be used.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable template; users may override with -T.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # The two merge parents (local/other), with optional conflict labels.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Per-file records: content conflicts carry hash/path/flag details,
    # path conflicts carry rename information.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Extras attached to files that are no longer in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed it's extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2391 2391
2392 2392
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather candidates from every registered namespace except branches;
    # branches are handled separately below so that only open ones are
    # offered as completions.
    candidates = set()
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # An empty argument list means "match everything" (empty prefix).
    prefixes = args or [b'']
    matched = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matched)))
    ui.write(b'\n')
2415 2415
2416 2416
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""

    def changelog():
        # Always operate on the unfiltered changelog: the nodemap covers
        # every revision, hidden ones included.
        return repo.unfiltered().changelog

    if opts['dump_new']:
        cl = changelog()
        if util.safehasattr(cl.index, "nodemap_data_all"):
            serialized = cl.index.nodemap_data_all()
        else:
            serialized = nodemap.persistent_data(cl.index)
        ui.write(serialized)
    elif opts['dump_disk']:
        persisted = nodemap.persisted_data(changelog())
        if persisted is not None:
            docket, data = persisted
            ui.write(data[:])
    elif opts['check']:
        cl = changelog()
        persisted = nodemap.persisted_data(cl)
        if persisted is not None:
            docket, data = persisted
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        persisted = nodemap.persisted_data(changelog())
        if persisted is not None:
            docket, data = persisted
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2478 2478
2479 2479
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Mode 1: --delete removes markers by index in the obsstore.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Mode 2: a precursor argument creates a new marker.
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Recording parents requires the precursor to be known
                    # locally, since its parents are read from the repo.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Mode 3: no precursor -- list markers, optionally filtered by --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2629 2629
2630 2630
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the requested revision (working directory when no -r given)
    # and print each recorded copy as "source -> destination".
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dest, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, dest))
2643 2643
2644 2644
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Fix a copy-paste defect: this function was previously also named
    # `debugp1copies`, which shadowed the p1 variant defined just above at
    # module level. The command-table entry (b'debugp2copies') is created by
    # the decorator, so the CLI behavior is unchanged by the rename.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2657 2657
2658 2658
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, keeping only
        # dirstate entries whose state letter is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Specs outside the repository cannot match anything.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/'; convert OS separators if needed.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator and
                # offer the directory prefix instead of the whole path.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate state letters from the flags:
    # n/m = normal/merged, a = added, r = removed. Empty means "all".
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2727 2727
2728 2728
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, restrict the copy trace to the requested
    # file patterns (matched against the first revision), and print each
    # copy as "source -> destination" in sorted destination order.
    old_ctx = scmutil.revsingle(repo, rev1)
    new_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(old_ctx, pats, opts)
    copy_map = copies.pathcopies(old_ctx, new_ctx, matcher)
    for dest, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, dest))
2742 2742
2743 2743
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is turned on unconditionally here; the output
    # only becomes visible when the command is run with --debug.
    logging_override = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)

        try:
            is_local = peer.local() is not None
            is_pushable = peer.canpush()

            ui.write(_(b'url: %s\n') % peer.url())
            ui.write(_(b'local: %s\n') % (_(b'yes') if is_local else _(b'no')))
            ui.write(
                _(b'pushable: %s\n')
                % (_(b'yes') if is_pushable else _(b'no'))
            )
        finally:
            # Always release the peer's underlying connection/process.
            peer.close()
2767 2767
2768 2768
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool takes precedence over everything else; inject it as a config
    # override so _picktool sees it the same way a real merge would.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress _picktool's chatter unless --debug is in effect.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2853 2853
2854 2854
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for name, value in sorted(
                pycompat.iteritems(target.listkeys(namespace))
            ):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(name), stringutil.escapestr(value))
                )
            return
        # Update mode: replace `old` with `new` for `key` on the remote.
        key, old, new = keyinfo
        with target.commandexecutor() as executor:
            result = executor.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(result) + b'\n')
        # pushkey reports truthy on success; invert for the exit code.
        return not result
    finally:
        target.close()
2890 2890
2891 2891
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare two revisions using parent vectors

    Prints each revision's pvec, the two depths, and the hamming
    distance, estimated graph distance and relation between them.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # The comparisons above are expected to be exhaustive; guard
        # against a NameError on `rel` below if they ever are not.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2918 2918
2919 2919
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows this below.
        changedfiles = None
        if opts.get('minimal'):
            in_manifest = set(ctx.manifest().keys())
            in_dirstate = set(dirstate)
            manifestonly = in_manifest - in_dirstate
            dsonly = in_dirstate - in_manifest
            # Keep scheduled adds untouched; everything else that is only
            # in the dirstate is inconsistent and must be rebuilt.
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2967 2967
2968 2968
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # All the heavy lifting lives in the repair module.
    byteopts = pycompat.byteskwargs(opts)
    repair.rebuildfncache(ui, repo, byteopts.get(b"only_data"))
2985 2985
2986 2986
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a falsy value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if renamed:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, renamed[0], hex(renamed[1]))
            )
        else:
            ui.write(_(b"%s not renamed\n") % relpath)
3006 3006
3007 3007
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order for stable output.
    for req in sorted(repo.requirements):
        ui.write(b"%s\n" % req)
3013 3013
3014 3014
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    rl = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # Raw per-revision dump of the index, one row per revision.
        numrevs = len(rl)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        cumsize = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = rl.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = rl.chainbase(rev)
            clen = rl.chainlen(rev)
            p1, p2 = rl.parentrevs(rev)
            rs = rl.rawsize(rev)
            cumsize = cumsize + rs
            # A revision stops being a head once it shows up as a parent.
            heads -= set(rl.parentrevs(rev))
            heads.add(rev)
            try:
                compression = cumsize / rl.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    rl.start(rev),
                    rl.end(rev),
                    rl.start(dbase),
                    rl.start(cbase),
                    rl.start(p1),
                    rl.start(p2),
                    rs,
                    cumsize,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    format = rl._format_version
    flagbits = rl._format_flags
    flags = []
    gdelta = False
    if flagbits & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if flagbits & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(int)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision: [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, stats):
        # Fold `size` into a [min, max, total] accumulator.
        if stats[0] is None or size < stats[0]:
            stats[0] = size
        if size > stats[1]:
            stats[1] = size
        stats[2] += size

    numrevs = len(rl)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = rl.parentrevs(rev)
        delta = rl.deltaparent(rev)
        if format > 0:
            addsize(rl.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = rl.length(rev)
        if delta == nullrev:
            # No delta base: either an empty text or a full snapshot.
            chainlengths.append(0)
            chainbases.append(rl.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = rl.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif rl.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = rl.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # A plain delta; classify it by its base.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(rl, b'_getsegmentforrevs'):
            segment = rl._getsegmentforrevs(rev, rev)[1]
        else:
            segment = rl._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(maxval):
        # Plain decimal format sized to the widest expected value.
        return basedfmtstr % len(str(maxval))

    def pcfmtstr(maxval, padding=0):
        # Decimal-plus-percentage format sized to the widest expected value.
        return basepcfmtstr % (len(str(maxval)), b' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3369 3369
3370 3370
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    rl = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full hashes with --debug, short ones otherwise.
    shortfn = hex if ui.debugflag else short

    # There might not be anything in the revlog, so have a sane default.
    idlen = 12
    for rev in rl:
        idlen = len(shortfn(rl.node(rev)))
        break

    # Column headers depend on the requested format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for rev in rl:
        node = rl.node(rev)
        if format == 0:
            try:
                pp = rl.parents(node)
            except Exception:
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        rev,
                        rl.start(rev),
                        rl.length(rev),
                        rl.linkrev(rev),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        rev,
                        rl.linkrev(rev),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = rl.parentrevs(rev)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        rl.flags(rev),
                        rl.start(rev),
                        rl.length(rev),
                        rl.rawsize(rev),
                        rl.linkrev(rev),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        rev,
                        rl.flags(rev),
                        rl.rawsize(rev),
                        rl.linkrev(rev),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3484 3484
3485 3485
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Transformation pipeline; each stage feeds the next.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {stagename for stagename, stagefn in stages}

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for stagename in opts[b'show_stage']:
            if stagename not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % stagename)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for stagename, stagefn in stages:
        treebystage[stagename] = tree = stagefn(tree)
        if stagename in showalways or (
            stagename in showchanged and tree != printedtree
        ):
            if opts[b'show_stage'] or stagename != b'parsed':
                ui.write(b"* %s:\n" % stagename)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Run both the analyzed and optimized trees and diff the results.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        matcher = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in matcher.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3617 3617
3618 3618
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        fd = int(opts[b'logiofd'])
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3667 3667
3668 3668
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # rev2 defaults to the null revision when omitted.
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3696 3696
3697 3697
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Report errors under this command's own name (the original
            # said b'debugdata', a copy-paste from the debugdata command).
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3724 3724
3725 3725
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() is deprecated since Python 3.7 and removed in 3.12;
    # build the equivalent context explicitly. Verification is deliberately
    # disabled: we only want to fetch the peer's certificate chain.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3797 3797
3798 3798
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect all strip-backup bundles, most recently modified first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from one bundle, honoring the
        # standard log options (--newest-first, --no-merges).
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to recover if the node is already present locally.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent we do not have; report and
            # move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming-style comparison; only the resulting
        # changeset list matters here.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Recovery mode: apply the first bundle that contains the
                # requested node, then stop scanning further backups.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # Listing mode: print the backup's mtime, optionally its
                # path (--verbose), then the changesets it contains.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            # getremotechanges() hands back a cleanup callback that must
            # run whether or not the bundle was displayed/applied.
            cleanupfn()
3939 3939
3940 3940
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the subrepository state recorded in the given changeset:
    # one entry per subrepo path, sorted, with its source and revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3952 3952
3953 3953
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Expose ui and repo (repo may be None with optionalrepo) to the
    # interactive session's local namespace.
    code.interact(local={'ui': ui, 'repo': repo})
3969 3969
3970 3970
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # The cache dict is shared across successorssets() calls so repeated
    # computation is avoided when several revisions are queried.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        successor_groups = obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        )
        for group in successor_groups:
            # One indented line per successors set; each node is preceded
            # by a single space. An empty set yields a blank line.
            for node in group:
                ui.write(b' ')
                ui.write(short(node))
            ui.write(b'\n')
4025 4025
4026 4026
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: only report what is already cached.
        fnode = fnodescache.getfnode(node, computemissing=False)
        if fnode is None:
            display = b'missing'
        elif not fnode:
            # Cached but falsy (non-None) entries are invalid records.
            display = b'invalid'
        else:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
4045 4045
4046 4046
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Log-template mode needs a repository to resolve the revisions.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    # An empty key or the key 'ui' is rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the raw parse tree; print it again after alias expansion
        # only when expansion actually changed something.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the -D properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4110 4110
4111 4111
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # Substitute a printable marker when no response was obtained.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4126 4126
4127 4127
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever ui.prompt() returned so tests can observe it.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4140 4140
4141 4141
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock while warming.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4147 4147
4148 4148
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All the heavy lifting lives in the upgrade module; deduplicate the
    # requested optimizations before handing them over.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui,
        repo,
        run=run,
        optimize=optimizations,
        backup=backup,
        **opts
    )
4198 4198
4199 4199
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    paths = list(repo[None].walk(matcher))
    if not paths:
        return
    # Only translate separators when ui.slash is set and the native
    # separator is not already '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Pad each column to the widest repo path / cwd-relative path.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(p) for p in paths),
        max(len(repo.pathto(p)) for p in paths),
    )
    for p in paths:
        flag = b'exact' if matcher.exact(p) else b''
        line = fmt % (p, display(repo.pathto(p)), flag)
        ui.write(b"%s\n" % line.rstrip())
4226 4226
4227 4227
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render the divergent changesets, each with its phase, as a
            # space-separated prefix (with a trailing space).
            rendered = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr()) for dctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4245 4245
4246 4246
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the "debugwireargs" wire protocol command against a peer.
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # The remote-connection options were consumed by hg.peer() and
        # must not be forwarded as command arguments.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Only forward options that were actually set.
        kwargs = {k: v for k, v in pycompat.iteritems(opts) if v}
        kwargs = pycompat.strkwargs(kwargs)
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **kwargs)
        second = peer.debugwireargs(*vals, **kwargs)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4277 4277
4278 4278
4279 4279 def _parsewirelangblocks(fh):
4280 4280 activeaction = None
4281 4281 blocklines = []
4282 4282 lastindent = 0
4283 4283
4284 4284 for line in fh:
4285 4285 line = line.rstrip()
4286 4286 if not line:
4287 4287 continue
4288 4288
4289 4289 if line.startswith(b'#'):
4290 4290 continue
4291 4291
4292 4292 if not line.startswith(b' '):
4293 4293 # New block. Flush previous one.
4294 4294 if activeaction:
4295 4295 yield activeaction, blocklines
4296 4296
4297 4297 activeaction = line
4298 4298 blocklines = []
4299 4299 lastindent = 0
4300 4300 continue
4301 4301
4302 4302 # Else we start with an indent.
4303 4303
4304 4304 if not activeaction:
4305 4305 raise error.Abort(_(b'indented line outside of block'))
4306 4306
4307 4307 indent = len(line) - len(line.lstrip())
4308 4308
4309 4309 # If this line is indented more than the last line, concatenate it.
4310 4310 if indent > lastindent and blocklines:
4311 4311 blocklines[-1] += line.lstrip()
4312 4312 else:
4313 4313 blocklines.append(line)
4314 4314 lastindent = indent
4315 4315
4316 4316 # Flush last block.
4317 4317 if activeaction:
4318 4318 yield activeaction, blocklines
4319 4319
4320 4320
4321 4321 @command(
4322 4322 b'debugwireproto',
4323 4323 [
4324 4324 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4325 4325 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4326 4326 (
4327 4327 b'',
4328 4328 b'noreadstderr',
4329 4329 False,
4330 4330 _(b'do not read from stderr of the remote'),
4331 4331 ),
4332 4332 (
4333 4333 b'',
4334 4334 b'nologhandshake',
4335 4335 False,
4336 4336 _(b'do not log I/O related to the peer handshake'),
4337 4337 ),
4338 4338 ]
4339 4339 + cmdutil.remoteopts,
4340 4340 _(b'[PATH]'),
4341 4341 optionalrepo=True,
4342 4342 )
4343 4343 def debugwireproto(ui, repo, path=None, **opts):
4344 4344 """send wire protocol commands to a server
4345 4345
4346 4346 This command can be used to issue wire protocol commands to remote
4347 4347 peers and to debug the raw data being exchanged.
4348 4348
4349 4349 ``--localssh`` will start an SSH server against the current repository
4350 4350 and connect to that. By default, the connection will perform a handshake
4351 4351 and establish an appropriate peer instance.
4352 4352
4353 4353 ``--peer`` can be used to bypass the handshake protocol and construct a
4354 4354 peer instance using the specified class type. Valid values are ``raw``,
4355 4355 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4356 4356 raw data payloads and don't support higher-level command actions.
4357 4357
4358 4358 ``--noreadstderr`` can be used to disable automatic reading from stderr
4359 4359 of the peer (for SSH connections only). Disabling automatic reading of
4360 4360 stderr is useful for making output more deterministic.
4361 4361
4362 4362 Commands are issued via a mini language which is specified via stdin.
4363 4363 The language consists of individual actions to perform. An action is
4364 4364 defined by a block. A block is defined as a line with no leading
4365 4365 space followed by 0 or more lines with leading space. Blocks are
4366 4366 effectively a high-level command with additional metadata.
4367 4367
4368 4368 Lines beginning with ``#`` are ignored.
4369 4369
4370 4370 The following sections denote available actions.
4371 4371
4372 4372 raw
4373 4373 ---
4374 4374
4375 4375 Send raw data to the server.
4376 4376
4377 4377 The block payload contains the raw data to send as one atomic send
4378 4378 operation. The data may not actually be delivered in a single system
4379 4379 call: it depends on the abilities of the transport being used.
4380 4380
4381 4381 Each line in the block is de-indented and concatenated. Then, that
4382 4382 value is evaluated as a Python b'' literal. This allows the use of
4383 4383 backslash escaping, etc.
4384 4384
4385 4385 raw+
4386 4386 ----
4387 4387
4388 4388 Behaves like ``raw`` except flushes output afterwards.
4389 4389
4390 4390 command <X>
4391 4391 -----------
4392 4392
4393 4393 Send a request to run a named command, whose name follows the ``command``
4394 4394 string.
4395 4395
4396 4396 Arguments to the command are defined as lines in this block. The format of
4397 4397 each line is ``<key> <value>``. e.g.::
4398 4398
4399 4399 command listkeys
4400 4400 namespace bookmarks
4401 4401
4402 4402 If the value begins with ``eval:``, it will be interpreted as a Python
4403 4403 literal expression. Otherwise values are interpreted as Python b'' literals.
4404 4404 This allows sending complex types and encoding special byte sequences via
4405 4405 backslash escaping.
4406 4406
4407 4407 The following arguments have special meaning:
4408 4408
4409 4409 ``PUSHFILE``
4410 4410 When defined, the *push* mechanism of the peer will be used instead
4411 4411 of the static request-response mechanism and the content of the
4412 4412 file specified in the value of this argument will be sent as the
4413 4413 command payload.
4414 4414
4415 4415 This can be used to submit a local bundle file to the remote.
4416 4416
4417 4417 batchbegin
4418 4418 ----------
4419 4419
4420 4420 Instruct the peer to begin a batched send.
4421 4421
4422 4422 All ``command`` blocks are queued for execution until the next
4423 4423 ``batchsubmit`` block.
4424 4424
4425 4425 batchsubmit
4426 4426 -----------
4427 4427
4428 4428 Submit previously queued ``command`` blocks as a batch request.
4429 4429
4430 4430 This action MUST be paired with a ``batchbegin`` action.
4431 4431
4432 4432 httprequest <method> <path>
4433 4433 ---------------------------
4434 4434
4435 4435 (HTTP peer only)
4436 4436
4437 4437 Send an HTTP request to the peer.
4438 4438
4439 4439 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4440 4440
4441 4441 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4442 4442 headers to add to the request. e.g. ``Accept: foo``.
4443 4443
4444 4444 The following arguments are special:
4445 4445
4446 4446 ``BODYFILE``
4447 4447 The content of the file defined as the value to this argument will be
4448 4448 transferred verbatim as the HTTP request body.
4449 4449
4450 4450 ``frame <type> <flags> <payload>``
4451 4451 Send a unified protocol frame as part of the request body.
4452 4452
4453 4453 All frames will be collected and sent as the body to the HTTP
4454 4454 request.
4455 4455
4456 4456 close
4457 4457 -----
4458 4458
4459 4459 Close the connection to the server.
4460 4460
4461 4461 flush
4462 4462 -----
4463 4463
4464 4464 Flush data written to the server.
4465 4465
4466 4466 readavailable
4467 4467 -------------
4468 4468
4469 4469 Close the write end of the connection and read all available data from
4470 4470 the server.
4471 4471
4472 4472 If the connection to the server encompasses multiple pipes, we poll both
4473 4473 pipes and read available data.
4474 4474
4475 4475 readline
4476 4476 --------
4477 4477
4478 4478 Read a line of output from the server. If there are multiple output
4479 4479 pipes, reads only the main pipe.
4480 4480
4481 4481 ereadline
4482 4482 ---------
4483 4483
4484 4484 Like ``readline``, but read from the stderr pipe, if available.
4485 4485
4486 4486 read <X>
4487 4487 --------
4488 4488
4489 4489 ``read()`` N bytes from the server's main output pipe.
4490 4490
4491 4491 eread <X>
4492 4492 ---------
4493 4493
4494 4494 ``read()`` N bytes from the server's stderr pipe, if available.
4495 4495
4496 4496 Specifying Unified Frame-Based Protocol Frames
4497 4497 ----------------------------------------------
4498 4498
4499 4499 It is possible to emit a *Unified Frame-Based Protocol* by using special
4500 4500 syntax.
4501 4501
4502 4502 A frame is composed as a type, flags, and payload. These can be parsed
4503 4503 from a string of the form:
4504 4504
4505 4505 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4506 4506
4507 4507 ``request-id`` and ``stream-id`` are integers defining the request and
4508 4508 stream identifiers.
4509 4509
4510 4510 ``type`` can be an integer value for the frame type or the string name
4511 4511 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4512 4512 ``command-name``.
4513 4513
4514 4514 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4515 4515 components. Each component (and there can be just one) can be an integer
4516 4516 or a flag name for stream flags or frame flags, respectively. Values are
4517 4517 resolved to integers and then bitwise OR'd together.
4518 4518
4519 4519 ``payload`` represents the raw frame payload. If it begins with
4520 4520 ``cbor:``, the following string is evaluated as Python code and the
4521 4521 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4522 4522 as a Python byte string literal.
4523 4523 """
4524 4524 opts = pycompat.byteskwargs(opts)
4525 4525
4526 4526 if opts[b'localssh'] and not repo:
4527 4527 raise error.Abort(_(b'--localssh requires a repository'))
4528 4528
4529 4529 if opts[b'peer'] and opts[b'peer'] not in (
4530 4530 b'raw',
4531 4531 b'http2',
4532 4532 b'ssh1',
4533 4533 b'ssh2',
4534 4534 ):
4535 4535 raise error.Abort(
4536 4536 _(b'invalid value for --peer'),
4537 4537 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4538 4538 )
4539 4539
4540 4540 if path and opts[b'localssh']:
4541 4541 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4542 4542
4543 4543 if ui.interactive():
4544 4544 ui.write(_(b'(waiting for commands on stdin)\n'))
4545 4545
4546 4546 blocks = list(_parsewirelangblocks(ui.fin))
4547 4547
4548 4548 proc = None
4549 4549 stdin = None
4550 4550 stdout = None
4551 4551 stderr = None
4552 4552 opener = None
4553 4553
4554 4554 if opts[b'localssh']:
4555 4555 # We start the SSH server in its own process so there is process
4556 4556 # separation. This prevents a whole class of potential bugs around
4557 4557 # shared state from interfering with server operation.
4558 4558 args = procutil.hgcmd() + [
4559 4559 b'-R',
4560 4560 repo.root,
4561 4561 b'debugserve',
4562 4562 b'--sshstdio',
4563 4563 ]
4564 4564 proc = subprocess.Popen(
4565 4565 pycompat.rapply(procutil.tonativestr, args),
4566 4566 stdin=subprocess.PIPE,
4567 4567 stdout=subprocess.PIPE,
4568 4568 stderr=subprocess.PIPE,
4569 4569 bufsize=0,
4570 4570 )
4571 4571
4572 4572 stdin = proc.stdin
4573 4573 stdout = proc.stdout
4574 4574 stderr = proc.stderr
4575 4575
4576 4576 # We turn the pipes into observers so we can log I/O.
4577 4577 if ui.verbose or opts[b'peer'] == b'raw':
4578 4578 stdin = util.makeloggingfileobject(
4579 4579 ui, proc.stdin, b'i', logdata=True
4580 4580 )
4581 4581 stdout = util.makeloggingfileobject(
4582 4582 ui, proc.stdout, b'o', logdata=True
4583 4583 )
4584 4584 stderr = util.makeloggingfileobject(
4585 4585 ui, proc.stderr, b'e', logdata=True
4586 4586 )
4587 4587
4588 4588 # --localssh also implies the peer connection settings.
4589 4589
4590 4590 url = b'ssh://localserver'
4591 4591 autoreadstderr = not opts[b'noreadstderr']
4592 4592
4593 4593 if opts[b'peer'] == b'ssh1':
4594 4594 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4595 4595 peer = sshpeer.sshv1peer(
4596 4596 ui,
4597 4597 url,
4598 4598 proc,
4599 4599 stdin,
4600 4600 stdout,
4601 4601 stderr,
4602 4602 None,
4603 4603 autoreadstderr=autoreadstderr,
4604 4604 )
4605 4605 elif opts[b'peer'] == b'ssh2':
4606 4606 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4607 4607 peer = sshpeer.sshv2peer(
4608 4608 ui,
4609 4609 url,
4610 4610 proc,
4611 4611 stdin,
4612 4612 stdout,
4613 4613 stderr,
4614 4614 None,
4615 4615 autoreadstderr=autoreadstderr,
4616 4616 )
4617 4617 elif opts[b'peer'] == b'raw':
4618 4618 ui.write(_(b'using raw connection to peer\n'))
4619 4619 peer = None
4620 4620 else:
4621 4621 ui.write(_(b'creating ssh peer from handshake results\n'))
4622 4622 peer = sshpeer.makepeer(
4623 4623 ui,
4624 4624 url,
4625 4625 proc,
4626 4626 stdin,
4627 4627 stdout,
4628 4628 stderr,
4629 4629 autoreadstderr=autoreadstderr,
4630 4630 )
4631 4631
4632 4632 elif path:
4633 4633 # We bypass hg.peer() so we can proxy the sockets.
4634 4634 # TODO consider not doing this because we skip
4635 4635 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4636 4636 u = urlutil.url(path)
4637 4637 if u.scheme != b'http':
4638 4638 raise error.Abort(_(b'only http:// paths are currently supported'))
4639 4639
4640 4640 url, authinfo = u.authinfo()
4641 4641 openerargs = {
4642 4642 'useragent': b'Mercurial debugwireproto',
4643 4643 }
4644 4644
4645 4645 # Turn pipes/sockets into observers so we can log I/O.
4646 4646 if ui.verbose:
4647 4647 openerargs.update(
4648 4648 {
4649 4649 'loggingfh': ui,
4650 4650 'loggingname': b's',
4651 4651 'loggingopts': {
4652 4652 'logdata': True,
4653 4653 'logdataapis': False,
4654 4654 },
4655 4655 }
4656 4656 )
4657 4657
4658 4658 if ui.debugflag:
4659 4659 openerargs['loggingopts']['logdataapis'] = True
4660 4660
4661 4661 # Don't send default headers when in raw mode. This allows us to
4662 4662 # bypass most of the behavior of our URL handling code so we can
4663 4663 # have near complete control over what's sent on the wire.
4664 4664 if opts[b'peer'] == b'raw':
4665 4665 openerargs['sendaccept'] = False
4666 4666
4667 4667 opener = urlmod.opener(ui, authinfo, **openerargs)
4668 4668
4669 4669 if opts[b'peer'] == b'http2':
4670 4670 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4671 4671 # We go through makepeer() because we need an API descriptor for
4672 4672 # the peer instance to be useful.
4673 4673 maybe_silent = (
4674 4674 ui.silent()
4675 4675 if opts[b'nologhandshake']
4676 4676 else util.nullcontextmanager()
4677 4677 )
4678 4678 with maybe_silent, ui.configoverride(
4679 4679 {(b'experimental', b'httppeer.advertise-v2'): True}
4680 4680 ):
4681 4681 peer = httppeer.makepeer(ui, path, opener=opener)
4682 4682
4683 4683 if not isinstance(peer, httppeer.httpv2peer):
4684 4684 raise error.Abort(
4685 4685 _(
4686 4686 b'could not instantiate HTTP peer for '
4687 4687 b'wire protocol version 2'
4688 4688 ),
4689 4689 hint=_(
4690 4690 b'the server may not have the feature '
4691 4691 b'enabled or is not allowing this '
4692 4692 b'client version'
4693 4693 ),
4694 4694 )
4695 4695
4696 4696 elif opts[b'peer'] == b'raw':
4697 4697 ui.write(_(b'using raw connection to peer\n'))
4698 4698 peer = None
4699 4699 elif opts[b'peer']:
4700 4700 raise error.Abort(
4701 4701 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4702 4702 )
4703 4703 else:
4704 4704 peer = httppeer.makepeer(ui, path, opener=opener)
4705 4705
4706 4706 # We /could/ populate stdin/stdout with sock.makefile()...
4707 4707 else:
4708 4708 raise error.Abort(_(b'unsupported connection configuration'))
4709 4709
4710 4710 batchedcommands = None
4711 4711
4712 4712 # Now perform actions based on the parsed wire language instructions.
4713 4713 for action, lines in blocks:
4714 4714 if action in (b'raw', b'raw+'):
4715 4715 if not stdin:
4716 4716 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4717 4717
4718 4718 # Concatenate the data together.
4719 4719 data = b''.join(l.lstrip() for l in lines)
4720 4720 data = stringutil.unescapestr(data)
4721 4721 stdin.write(data)
4722 4722
4723 4723 if action == b'raw+':
4724 4724 stdin.flush()
4725 4725 elif action == b'flush':
4726 4726 if not stdin:
4727 4727 raise error.Abort(_(b'cannot call flush on this peer'))
4728 4728 stdin.flush()
4729 4729 elif action.startswith(b'command'):
4730 4730 if not peer:
4731 4731 raise error.Abort(
4732 4732 _(
4733 4733 b'cannot send commands unless peer instance '
4734 4734 b'is available'
4735 4735 )
4736 4736 )
4737 4737
4738 4738 command = action.split(b' ', 1)[1]
4739 4739
4740 4740 args = {}
4741 4741 for line in lines:
4742 4742 # We need to allow empty values.
4743 4743 fields = line.lstrip().split(b' ', 1)
4744 4744 if len(fields) == 1:
4745 4745 key = fields[0]
4746 4746 value = b''
4747 4747 else:
4748 4748 key, value = fields
4749 4749
4750 4750 if value.startswith(b'eval:'):
4751 4751 value = stringutil.evalpythonliteral(value[5:])
4752 4752 else:
4753 4753 value = stringutil.unescapestr(value)
4754 4754
4755 4755 args[key] = value
4756 4756
4757 4757 if batchedcommands is not None:
4758 4758 batchedcommands.append((command, args))
4759 4759 continue
4760 4760
4761 4761 ui.status(_(b'sending %s command\n') % command)
4762 4762
4763 4763 if b'PUSHFILE' in args:
4764 4764 with open(args[b'PUSHFILE'], 'rb') as fh:
4765 4765 del args[b'PUSHFILE']
4766 4766 res, output = peer._callpush(
4767 4767 command, fh, **pycompat.strkwargs(args)
4768 4768 )
4769 4769 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4770 4770 ui.status(
4771 4771 _(b'remote output: %s\n') % stringutil.escapestr(output)
4772 4772 )
4773 4773 else:
4774 4774 with peer.commandexecutor() as e:
4775 4775 res = e.callcommand(command, args).result()
4776 4776
4777 4777 if isinstance(res, wireprotov2peer.commandresponse):
4778 4778 val = res.objects()
4779 4779 ui.status(
4780 4780 _(b'response: %s\n')
4781 4781 % stringutil.pprint(val, bprefix=True, indent=2)
4782 4782 )
4783 4783 else:
4784 4784 ui.status(
4785 4785 _(b'response: %s\n')
4786 4786 % stringutil.pprint(res, bprefix=True, indent=2)
4787 4787 )
4788 4788
4789 4789 elif action == b'batchbegin':
4790 4790 if batchedcommands is not None:
4791 4791 raise error.Abort(_(b'nested batchbegin not allowed'))
4792 4792
4793 4793 batchedcommands = []
4794 4794 elif action == b'batchsubmit':
4795 4795 # There is a batching API we could go through. But it would be
4796 4796 # difficult to normalize requests into function calls. It is easier
4797 4797 # to bypass this layer and normalize to commands + args.
4798 4798 ui.status(
4799 4799 _(b'sending batch with %d sub-commands\n')
4800 4800 % len(batchedcommands)
4801 4801 )
4802 4802 assert peer is not None
4803 4803 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4804 4804 ui.status(
4805 4805 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4806 4806 )
4807 4807
4808 4808 batchedcommands = None
4809 4809
4810 4810 elif action.startswith(b'httprequest '):
4811 4811 if not opener:
4812 4812 raise error.Abort(
4813 4813 _(b'cannot use httprequest without an HTTP peer')
4814 4814 )
4815 4815
4816 4816 request = action.split(b' ', 2)
4817 4817 if len(request) != 3:
4818 4818 raise error.Abort(
4819 4819 _(
4820 4820 b'invalid httprequest: expected format is '
4821 4821 b'"httprequest <method> <path>'
4822 4822 )
4823 4823 )
4824 4824
4825 4825 method, httppath = request[1:]
4826 4826 headers = {}
4827 4827 body = None
4828 4828 frames = []
4829 4829 for line in lines:
4830 4830 line = line.lstrip()
4831 4831 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4832 4832 if m:
4833 4833 # Headers need to use native strings.
4834 4834 key = pycompat.strurl(m.group(1))
4835 4835 value = pycompat.strurl(m.group(2))
4836 4836 headers[key] = value
4837 4837 continue
4838 4838
4839 4839 if line.startswith(b'BODYFILE '):
4840 4840 with open(line.split(b' ', 1), b'rb') as fh:
4841 4841 body = fh.read()
4842 4842 elif line.startswith(b'frame '):
4843 4843 frame = wireprotoframing.makeframefromhumanstring(
4844 4844 line[len(b'frame ') :]
4845 4845 )
4846 4846
4847 4847 frames.append(frame)
4848 4848 else:
4849 4849 raise error.Abort(
4850 4850 _(b'unknown argument to httprequest: %s') % line
4851 4851 )
4852 4852
4853 4853 url = path + httppath
4854 4854
4855 4855 if frames:
4856 4856 body = b''.join(bytes(f) for f in frames)
4857 4857
4858 4858 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4859 4859
4860 4860 # urllib.Request insists on using has_data() as a proxy for
4861 4861 # determining the request method. Override that to use our
4862 4862 # explicitly requested method.
4863 4863 req.get_method = lambda: pycompat.sysstr(method)
4864 4864
4865 4865 try:
4866 4866 res = opener.open(req)
4867 4867 body = res.read()
4868 4868 except util.urlerr.urlerror as e:
4869 4869 # read() method must be called, but only exists in Python 2
4870 4870 getattr(e, 'read', lambda: None)()
4871 4871 continue
4872 4872
4873 4873 ct = res.headers.get('Content-Type')
4874 4874 if ct == 'application/mercurial-cbor':
4875 4875 ui.write(
4876 4876 _(b'cbor> %s\n')
4877 4877 % stringutil.pprint(
4878 4878 cborutil.decodeall(body), bprefix=True, indent=2
4879 4879 )
4880 4880 )
4881 4881
4882 4882 elif action == b'close':
4883 4883 assert peer is not None
4884 4884 peer.close()
4885 4885 elif action == b'readavailable':
4886 4886 if not stdout or not stderr:
4887 4887 raise error.Abort(
4888 4888 _(b'readavailable not available on this peer')
4889 4889 )
4890 4890
4891 4891 stdin.close()
4892 4892 stdout.read()
4893 4893 stderr.read()
4894 4894
4895 4895 elif action == b'readline':
4896 4896 if not stdout:
4897 4897 raise error.Abort(_(b'readline not available on this peer'))
4898 4898 stdout.readline()
4899 4899 elif action == b'ereadline':
4900 4900 if not stderr:
4901 4901 raise error.Abort(_(b'ereadline not available on this peer'))
4902 4902 stderr.readline()
4903 4903 elif action.startswith(b'read '):
4904 4904 count = int(action.split(b' ', 1)[1])
4905 4905 if not stdout:
4906 4906 raise error.Abort(_(b'read not available on this peer'))
4907 4907 stdout.read(count)
4908 4908 elif action.startswith(b'eread '):
4909 4909 count = int(action.split(b' ', 1)[1])
4910 4910 if not stderr:
4911 4911 raise error.Abort(_(b'eread not available on this peer'))
4912 4912 stderr.read(count)
4913 4913 else:
4914 4914 raise error.Abort(_(b'unknown action: %s') % action)
4915 4915
4916 4916 if batchedcommands is not None:
4917 4917 raise error.Abort(_(b'unclosed "batchbegin" request'))
4918 4918
4919 4919 if peer:
4920 4920 peer.close()
4921 4921
4922 4922 if proc:
4923 4923 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now