##// END OF EJS Templates
debugcommands: s/stdin/stdout in debugnodemap help...
Pulkit Goyal -
r47193:3e3b81b6 default
parent child Browse files
Show More
@@ -1,4678 +1,4678 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import glob
15 15 import operator
16 16 import os
17 17 import platform
18 18 import random
19 19 import re
20 20 import socket
21 21 import ssl
22 22 import stat
23 23 import string
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullid,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 revlog,
73 73 revset,
74 74 revsetlang,
75 75 scmutil,
76 76 setdiscovery,
77 77 simplemerge,
78 78 sshpeer,
79 79 sslutil,
80 80 streamclone,
81 81 strip,
82 82 tags as tagsmod,
83 83 templater,
84 84 treediscovery,
85 85 upgrade,
86 86 url as urlmod,
87 87 util,
88 88 vfs as vfsmod,
89 89 wireprotoframing,
90 90 wireprotoserver,
91 91 wireprotov2peer,
92 92 )
93 93 from .utils import (
94 94 cborutil,
95 95 compression,
96 96 dateutil,
97 97 procutil,
98 98 stringutil,
99 99 )
100 100
101 101 from .revlogutils import (
102 102 deltas as deltautil,
103 103 nodemap,
104 104 sidedata,
105 105 )
106 106
# convenience alias used by commands that manage locks
release = lockmod.release

# registration table for all debug* commands in this module, seeded with
# the commands exported by the strip module so they share one table
table = {}
table.update(strip.command._table)
command = registrar.command(table)
112 112
113 113
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # an explicit revlog index file was given on the command line
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = store.lookup
    elif nargs == 2:
        # no index file: fall back to the changelog of the current repo
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(anc), hex(anc)))
133 133
134 134
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # NOTE: vfs paths are bytes everywhere else in this file; the original
    # passed a native str here, which raises TypeError on Python 3 when
    # joined with the bytes vfs base path.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
150 150
151 151
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # close the bundle handle when done instead of leaking it; this matches
    # the `with hg.openpath(...)` pattern used by `hg debugbundle`
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
158 158
159 159
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # this command only makes sense on a completely empty repository
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first pass over the parsed text,
    # used only to size the progress bar and the mergeable-file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        # at: rev number of the most recently committed node (-1 = none yet)
        at = -1
        atbranch = b'default'
        # nodeids[i] is the node hash of rev i, for resolving backrefs
        nodeids = []
        id = 0
        progress.update(id)
        # second pass: actually create a commit per 'n' event
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the "mf" file of both parents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's pair of lines so every rev changes "mf"
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every rev
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # a fresh "nf<id>" file per rev; merges also carry over
                    # the nf* files of the second parent
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file content from filecontent
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # resolve backrefs (rev numbers) into parent node ids
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag event: remember it, written out after the loop
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # branch annotation: applies to all subsequent nodes
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
335 335
336 336
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the deltas of a changegroup; verbose listing when ``all`` is set"""
    prefix = b' ' * indent
    if not all:
        # terse mode: one changelog node hash per line
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for chunk in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = chunk
            ui.write(b"%s%s\n" % (prefix, hex(node)))
        return

    ui.writenoi18n(
        b"%sformat: id, p1, p2, cset, delta base, len(delta)\n" % prefix
    )

    def showchunks(named):
        # print every delta of the current sub-stream under a heading
        ui.write(b"\n%s%s\n" % (prefix, named))
        for chunk in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = chunk
            ui.write(
                b"%s%s %s %s %s %s %d\n"
                % (
                    prefix,
                    hex(node),
                    hex(p1),
                    hex(p2),
                    hex(cs),
                    hex(deltabase),
                    len(delta),
                )
            )

    # consume the stream in its fixed order: changelog, manifests, filelogs
    gen.changelogheader()
    showchunks(b"changelog")
    gen.manifestheader()
    showchunks(b"manifest")
    for chunkdata in iter(gen.filelogheader, {}):
        showchunks(chunkdata[b'filename'])
376 376
377 377
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report unreadable markers instead of aborting the whole dump
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(
            b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data))
        )
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
        fm.end()
400 400
401 401
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            # one "<hash> <phasename>" line per head, indented as requested
            ui.write(b'%s%s %s\n' % (prefix, hex(head), phasename))
410 410
411 411
def _quasirepr(thing):
    """repr()-like rendering with deterministic ordering for dict types"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
418 418
419 419
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when any types were requested
        if wanted and part.type not in wanted:
            continue
        header = b'%s -- %s (mandatory: %r)\n'
        ui.write(
            (header % (part.type, _quasirepr(part.params), part.mandatory))
        )
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers' and not ui.quiet:
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads' and not ui.quiet:
            _debugphaseheads(ui, part, indent=4)
442 442
443 443
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: just print the bundlespec and stop
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
466 466
467 467
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b' %s\n' % cap)
    b2caps = bundle2.bundle2caps(peer)
    if not b2caps:
        return
    ui.writenoi18n(b'Bundle2 capabilities:\n')
    for key, values in sorted(pycompat.iteritems(b2caps)):
        ui.write(b' %s\n' % key)
        for value in values:
            ui.write(b' %s\n' % value)
484 484
485 485
@command(b'debugchangedfiles', [], b'REV')
def debugchangedfiles(ui, repo, rev):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    sd = repo.changelog.sidedata(ctx.rev())
    files_block = sd.get(sidedata.SD_FILES)
    if files_block is None:
        # no files sidedata recorded for this revision: nothing to show
        return
    files = metadata.decode_files_sidedata(sd)
    for f in sorted(files.touched):
        # classify the change; order mirrors the sidedata categories
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        # note which parent (if any) the file was copied from
        copy_parent = copy_source = b""
        for parent, copymap in (
            (b"p1", files.copied_from_p1),
            (b"p2", files.copied_from_p2),
        ):
            if f in copymap:
                copy_parent = parent
                copy_source = copymap[f]
                break

        ui.write(
            b"%-8s %2s: %s, %s;\n" % (action, copy_parent, f, copy_source)
        )
518 518
519 519
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    manifest1 = repo[p1].manifest()
    manifest2 = repo[p2].manifest()
    problems = 0
    for path in repo.dirstate:
        state = repo.dirstate[path]
        # 'n'ormal / 'r'emoved entries must exist in the first manifest
        if state in b"nr" and path not in manifest1:
            ui.warn(
                _(b"%s in state %s, but not in manifest1\n") % (path, state)
            )
            problems += 1
        # 'a'dded entries must not already be in the first manifest
        if state in b"a" and path in manifest1:
            ui.warn(
                _(b"%s in state %s, but also in manifest1\n") % (path, state)
            )
            problems += 1
        # 'm'erged entries must come from at least one parent manifest
        if state in b"m" and path not in manifest1 and path not in manifest2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n")
                % (path, state)
            )
            problems += 1
    # reverse check: everything in manifest1 must be tracked by the dirstate
    for path in manifest1:
        state = repo.dirstate[path]
        if state not in b"nrm":
            ui.warn(
                _(b"%s in manifest1, but listed as state %s") % (path, state)
            )
            problems += 1
    if problems:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
548 548
549 549
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; otherwise list raw colors/effects
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
562 562
563 563
def _debugdisplaycolor(ui):
    """print each available color name rendered in its own color"""
    # work on a copy so we can replace the style table without side effects
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[6:]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
580 580
581 581
def _debugdisplaystyle(ui):
    """print every configured style label with its rendered effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            # pad so the effect lists line up in one column
            padding = b' ' * (max(0, width - len(label)))
            rendered = b', '.join(ui.label(e, e) for e in effects.split())
            ui.write(b': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write(b'\n')
595 595
596 596
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requires, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requires)))
618 618
619 619
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # a standalone revlog index was given: walk its DAG directly
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # emit 'n' (node) events; the listed revs also get 'l' (label)
            # events so they show up as rN in the output
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # 'a' events annotate branch switches, 'n' events introduce
            # nodes, 'l' events attach tag labels
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
689 689
690 690
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if any(opts.get(k) for k in (b'changelog', b'manifest', b'dir')):
        # with -c/-m/--dir the first positional argument is the revision
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
706 706
707 707
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        # optional second argument: test the date against a date range
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
726 726
727 727
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # summarize one revision from its raw index entry:
        # e[1] = compressed size, e[2] = uncompressed size,
        # e[3] = delta base rev, e[5]/e[6] = parent revs
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # with generaldelta the base can be any rev; classify it
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, deltas are always against the
            # previous revision (or the rev is a full-text base)
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chain base rev -> sequential chain id, assigned on first sight
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # on-disk distance from the chain's base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # guard the ratios against zero-size revisions/chains
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of the chain and measure its cost
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
908 908
909 909
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # the deprecated --nodates flag overrides --dates
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True

    if opts.get('datesort'):
        # sort by mtime, then by filename
        def keyfunc(item):
            return (item[1][3], item[0])

    else:
        keyfunc = None  # sort by filename

    for filename, ent in sorted(
        pycompat.iteritems(repo.dirstate), key=keyfunc
    ):
        # ent = (state, mode, size, mtime); mtime of -1 means "unset"
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(
            b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, filename)
        )
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
953 953
954 954
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    # audit dictionary filled in by the discovery implementation (e.g. the
    # b'total-roundtrips' counter used in the summary below)
    data = {}
    if opts.get(b'old'):
        # legacy pre-sampling protocol, driven through treediscovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the common set to its heads, as the modern protocol does
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern sampling-based discovery (setdiscovery)

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # NOTE(review): `any` shadows the builtin of the same name here
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # every local revision is either common or missing
    assert len(common) + len(missing) == len(all)

    # the "initial undecided" set is what discovery starts from: everything
    # not obviously reachable from (or reaching) the known common heads
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    # display discovery summary
    ui.writenoi18n(b"elapsed time:  %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"round-trips:           %(total-roundtrips)9d\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(
        b"    also local heads:  %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b"    also remote heads: %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(b"    both:              %(nb-common-heads-both)9d\n" % data)
    ui.writenoi18n(b"  local heads:         %(nb-head-local)9d\n" % data)
    ui.writenoi18n(
        b"    common:            %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b"    missing:           %(nb-head-local-missing)9d\n" % data
    )
    ui.writenoi18n(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    ui.writenoi18n(
        b"    common:            %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(
        b"    unknown:           %(nb-head-remote-unknown)9d\n" % data
    )
    ui.writenoi18n(b"local changesets:      %(nb-revs)9d\n" % data)
    ui.writenoi18n(b"  common:              %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b"    heads:             %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(b"    roots:             %(nb-common-roots)9d\n" % data)
    ui.writenoi18n(b"  missing:             %(nb-revs-missing)9d\n" % data)
    ui.writenoi18n(b"    heads:             %(nb-missing-heads)9d\n" % data)
    ui.writenoi18n(b"    roots:             %(nb-missing-roots)9d\n" % data)
    ui.writenoi18n(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    ui.writenoi18n(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    ui.writenoi18n(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    ui.writenoi18n(b"    common:            %(nb-ini_und-common)9d\n" % data)
    ui.writenoi18n(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
1113 1113
1114 1114
# copy buffer size (4 KiB) used while streaming downloaded data
_chunksize = 4 << 10


@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    # open the URL with Mercurial's proxy/auth handling
    fh = urlmod.open(ui, url, output)

    # write to --output when given, otherwise stream to the ui
    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # fix: close the response handle too — the original leaked `fh`
        # (urlmod.open returns a file-like object with close())
        fh.close()
        if output:
            dest.close()
1140 1140
1141 1141
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # iterate extensions sorted by name for stable output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen builds have no per-module __file__; point at the binary
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # -q/-v: bare name per line; default mode appends a compatibility note
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                # last entry of testedwith is the newest tested version
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
            fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1203 1203
1204 1204
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset compilation pipeline; each stage transforms the tree of the
    # previous one, and any stage's result can be dumped with --show-stage
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, dumping the tree after each requested stage
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # finally print every candidate file the matcher accepts
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1300 1300
1301 1301
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # column width: longest variant name, but at least the header label
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded so all value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():
        # plain output shows yes/no for booleans but keeps string values as-is
        def formatvalue(value):
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        # structured output (json/template) keeps the raw values
        formatvalue = pycompat.identity

    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so templates/color can highlight mismatches
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1372 1372
1373 1373
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a truthy probe result as b'yes'/b'no'
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway temp file; failures (e.g. a
    # read-only directory) leave the answer as "(unknown)"
    sensitivity = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            sensitivity = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % sensitivity)
1396 1396
1397 1397
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # assemble the getbundle() keyword arguments from the command line
    # TODO: get desired bundlecaps from command line.
    kwargs = {'bundlecaps': None}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-facing compression name to the on-disk bundle type
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1444 1444
1445 1445
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no arguments: dump the combined ignore matcher itself
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    matcher = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in matcher.files():
        normed = util.normpath(f)
        matched = None
        ruledata = None
        if normed != b'.':
            if ignore(normed):
                # the file itself matches an ignore pattern
                matched = normed
                ruledata = repo.dirstate._ignorefileandline(normed)
            else:
                # otherwise one of its parent directories may be ignored
                for parent in pathutil.finddirs(normed):
                    if ignore(parent):
                        matched = parent
                        ruledata = repo.dirstate._ignorefileandline(parent)
                        break

        if not matched:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue

        if matched == normed:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), matched)
            )
        # report the exact rule (file, line number, pattern) that matched
        ignorefile, lineno, line = ruledata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1494 1494
1495 1495
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes with --debug, abbreviated ones otherwise
    fmtnode = hex if ui.debugflag else short

    # measure the node column width from the first entry (12 when empty)
    width = 12
    for first in store:
        width = len(fmtnode(store.node(first)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(width), b'p1'.ljust(width))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', fmtnode(node))
        fm.write(b'p1', b'%s ', fmtnode(p1))
        fm.write(b'p2', b'%s', fmtnode(p2))
        fm.plain(b'\n')

    fm.end()
1535 1535
1536 1536
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # first parent edge is always emitted; second only when it exists
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1555 1555
1556 1556
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise the index first — presumably so its counters are populated
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for name, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (name, value))
1566 1566
1567 1567
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # running tally of detected problems; also the return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # pythonlib: directory of the stdlib, or the executable for frozen builds
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    # TLS protocol names supported by this Python, plus b'sni' when available
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b'  TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b'  SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # figure out which compiled-extension flavours the module policy implies
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        # fp (the opened file) is unused here; only the path matters
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own installation checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1866 1866
1867 1867
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # one b'1'/b'0' per queried node, in query order
    results = peer.known([bin(s) for s in ids])
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in results))
1881 1881
1882 1882
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only so old completion scripts keep working; simply forwards
    # everything to debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1887 1887
1888 1888
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: unconditionally delete the lock files without
    # checking who (if anyone) holds them.  The store lock lives in the
    # store vfs (svfs), the working-state lock in the repo vfs.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        # --set-wlock / --set-lock: acquire non-blocking (False) so we can
        # report immediately when another process already holds the lock.
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the lock(s) until the user answers the prompt (or an
            # interruption arrives); the finally clause releases them.
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # No modification option given: report the current lock state.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Print the status of one lock file; return 1 if held, else 0."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could take the lock, so nobody holds it; drop it again.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the failed
                # acquisition and the stat: treat as free.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2000 2000
2001 2001
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Reach into the manifest revlog for its fulltext cache; not every
        # revlog implementation exposes one, hence the AttributeError guard.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        # Clearing mutates on-disk state, so take the working-copy lock.
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read()  # stores revision in cache too
        return

    # Default mode: read-only report of the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2073 2073
2074 2074
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # Verbose mode first reports which on-disk record format is in use.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template; users can override it with -T/--template.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "commits" section: the local and other sides of the merge.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files" section: one item per file tracked by the merge state.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level "extras" section: extras for files NOT in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2182 2182
2183 2183
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather every name from all namespaces except branches; branches get
    # special treatment below because historically only open branches were
    # listed.
    candidates = set()
    for namespace_name, ns in pycompat.iteritems(repo.names):
        if namespace_name == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # An empty argument list means "complete everything".
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(name for name in candidates if name.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2206 2206
2207 2207
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            # note: the data is written to stdout, not stdin
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Serialize a fresh nodemap for the (unfiltered) changelog and write
        # the binary blob to stdout.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        # Dump the raw persisted nodemap data, if any exists on disk.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        # Validate the on-disk data against the in-memory index.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        # Display the docket (metadata header) describing the on-disk data.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2269 2269
2270 2270
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Convert a full hex node id to binary, rejecting anything that is
        # not exactly node-sized.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Mode 1: --delete removes markers by index.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Mode 2: a precursor argument means "create a new marker".
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Marker creation needs the store lock and its own transaction.
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Mode 3 (default): list markers, optionally filtered by --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices are positions in the FULL marker list, so we must
            # iterate everything even when only displaying a subset.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2420 2420
2421 2421
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byte_opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byte_opts.get(b'rev'), default=None)
    # One "src -> dst" line per recorded copy against the first parent.
    copy_map = ctx.p1copies()
    for destination, source in copy_map.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2434 2434
2435 2435
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # Renamed from debugp1copies: the original definition reused the name of
    # the p1 variant above, shadowing it at module level even though the
    # registered command name was already b'debugp2copies'.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2448 2448
2449 2449
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec; `acceptable` is a
        # bytestring of dirstate state characters to keep (n/m/a/r).
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Spec is outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative by stripping the root prefix.
        spec = spec[len(rootdir) :]
        # Dirstate paths always use '/'; normalize the spec on platforms
        # where the OS separator differs (e.g. Windows).
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator after the
                # spec: that prefix is a directory completion.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the accepted-state filter from the flags; empty means "all".
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2518 2518
2519 2519
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    # Emit "src -> dst" lines in sorted-by-destination order.
    copy_map = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for dst, src in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2533 2533
2534 2534
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging; the output still only shows up
    # under --debug.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        is_local = peer.local() is not None
        can_push = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if is_local else _(b'no')))
        ui.write(_(b'pushable: %s\n') % (_(b'yes') if can_push else _(b'no')))
2553 2553
2554 2554
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool maps onto the ui.forcemerge override, mirroring what the
        # real merge machinery does.
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Unless --debug is set, capture the tool-picking chatter in
                # a buffer so only the FILE = MERGETOOL lines are printed.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2642 2642
2643 2643
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
            ui.write(
                b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
            )
        return

    # Update mode: compare-and-set via the pushkey wire command.
    key, old, new = keyinfo
    with target.commandexecutor() as e:
        r = e.callcommand(
            b'pushkey',
            {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            },
        ).result()

    ui.status(pycompat.bytestr(r) + b'\n')
    return not r
2675 2675
2676 2676
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display and compare the parent vectors (pvec) of two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Classify the relation between the two vectors.
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    # NOTE(review): if none of the branches above match, `rel` stays unbound
    # and the final ui.write raises NameError — presumably the four cases are
    # exhaustive for pvec comparisons; confirm against the pvec module.
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2703 2703
2704 2704
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            # Restrict the rebuild to files that disagree between the
            # dirstate and the target manifest (excluding added files).
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        # changedfiles=None means "rebuild everything".
        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2752 2752
2753 2753
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All of the real work is delegated to the repair module.
    repair.rebuildfncache(ui, repo)
2758 2758
2759 2759
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or a false value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, renamed[0], hex(renamed[1]))
            )
2779 2779
2780 2780
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # Print one requirement per line, in sorted order for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2786 2786
2787 2787
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    # --dump mode: emit one raw index row per revision and return without
    # computing the aggregate statistics below.
    if opts.get(b"dump"):
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # maintain the set of current head revisions incrementally:
            # parents of this rev are no longer heads, this rev is.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Decode the revlog version word: low 16 bits are the format version,
    # the remaining bits are feature flags.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    # Each size accumulator is a mutable [min, max, total] triple.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold a new sample into the [min, max, total] triple `l` in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # Single pass over all revisions, classifying each delta and gathering
    # chain/size statistics.
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full snapshot (chain of length 0)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # stored as a delta on top of `delta`'s chain
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the chunk identifies its compression type
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Derive averages and totals; note the in-place /= turns the `total`
    # slot of each triple into an average before display.
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string builders: width is derived from the largest value so
    # columns line up.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # return (value, percentage-of-total) for the percent format strings
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    # --- report ---
    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # label a chunk-type byte: printable letters get shown literally
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3143 3143
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # only two layouts are supported: 0 (legacy) and 1 (with flags/sizes)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full hashes, otherwise abbreviated ones
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # width of the node-id column, measured from the first entry
        idlen = len(shortfn(r.node(i)))
        break

    # header line; columns depend on format and verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # one row per revision
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the lookup fails
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3257 3257
3258 3258
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # Pipeline of (stage-name, transform) pairs applied in order to the
    # parse tree.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Decide which stages' trees get printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, remembering each intermediate tree and printing the
    # requested stages (changed-only stages print just when the tree differs
    # from the last one printed).
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and diff the
        # resulting revision sequences; exit 1 if they differ.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # unified-diff-style output of the mismatching revisions
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3390 3390
3391 3391
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive log destinations
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Serve the SSH wire protocol over this process's stdio until EOF.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3440 3440
3441 3441
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # second parent defaults to the null revision when REV2 is omitted
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3469 3469
3470 3470
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision, not
    # a file path.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Fix: report this command's own name (was b'debugdata', a
            # copy-paste leftover) so usage errors mention the right command.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # some storage objects wrap a revlog; unwrap to reach .sidedata()
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # print entries in deterministic (sorted-by-key) order
        sidedata = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3497 3497
3498 3498
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    # default to the repository's 'default' path when no SOURCE is given
    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # resolve the (host, port) pair; only https and ssh schemes make sense
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Connect without verification so the peer certificate can be fetched
    # even when the local chain is currently incomplete.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first check without building; build only if something is missing
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3568 3568
3569 3569
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # collect all backup bundles under .hg/strip-backup, newest first
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # show up to `limit` changesets from chlist, honoring the log-style
        # --newest-first and --no-merges options
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # the bundle references a parent the local repo does not have
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # silence the incoming computation; restore verbosity afterwards
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # apply the first bundle that contains the requested node
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # listing mode: print the backup's mtime, then its changesets
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3705 3705
3706 3706
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the recorded subrepository state of the requested changeset:
    # for each entry, its path, source and pinned revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        source, revision = state[0], state[1]
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % source)
        ui.writenoi18n(b' revision %s\n' % revision)
3718 3718
3719 3719
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the interactive session's namespace with the ui object and the
    # repo (which is None when run outside a repository).
    namespace = dict(ui=ui, repo=repo)
    code.interact(local=namespace)
3735 3735
3736 3736
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # rendering helpers for changeset contexts and raw nodes
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # one indented line per successors set; empty sets still emit
            # the terminating newline
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
3791 3791
3792 3792
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: only report what the cache already holds.
        tagsnode = cache.getfnode(node, computemissing=False)
        if tagsnode:
            display = hex(tagsnode)
        else:
            display = b'missing/invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3803 3803
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions into a property dict.
    props = {}
    for spec in opts['define']:
        if b'=' not in spec:
            raise error.Abort(_(b'malformed keyword definition: %s') % spec)
        key, val = [part.strip() for part in spec.split(b'=', 1)]
        # An empty key or the reserved resource name b'ui' is rejected.
        if not key or key == b'ui':
            raise error.Abort(_(b'malformed keyword definition: %s') % spec)
        props[key] = val

    if ui.verbose:
        aliases = ui.configitems(b'templatealias')
        parsedtree = templater.parse(tmpl)
        ui.note(templater.prettyformat(parsedtree), b'\n')
        expanded = templater.expandaliases(parsedtree, aliases)
        if expanded != parsedtree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(expanded), b'\n'
            )

    if revs is None:
        # Generic template: render once with the defined properties.
        resources = formatter.templateresources(ui, repo)
        templ = formatter.maketemplater(ui, tmpl, resources=resources)
        if ui.verbose:
            keywords, functions = templ.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(keywords)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(functions)))
        ui.write(templ.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            keywords, functions = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(keywords)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(functions)))
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
3868 3868
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may return None; substitute a printable placeholder.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
3884 3884
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever the user typed at the prompt.
    ui.writenoi18n(b'response: %s\n' % ui.prompt(prompt))
3898 3898
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy and store locks: cache warming may write
    # under either.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
3905 3905
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before handing off.
    requested = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=requested, backup=backup, **opts
    )
3956 3956
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    paths = list(repo[None].walk(matcher))
    if not paths:
        return
    # Normalize path separators for display when ui.slash is set on
    # platforms whose native separator is not '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Column widths are sized to the longest repo-relative and cwd-relative
    # paths so the output lines up.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(p) for p in paths),
        max(len(repo.pathto(p)) for p in paths),
    )
    for p in paths:
        exact = b'exact' if matcher.exact(p) else b''
        line = fmt % (p, display(repo.pathto(p)), exact)
        ui.write(b"%s\n" % line.rstrip())
3984 3984
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render each divergent node as "<hex> (<phase>)", separated and
            # terminated by single spaces.
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in divergent
                )
                + b' '
            )
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4003 4003
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Drop the generic remote options; only command-specific, non-empty
    # values are forwarded over the wire.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = pycompat.strkwargs(
        {k: v for k, v in pycompat.iteritems(opts) if v}
    )
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % res1)
    if res1 != res2:
        ui.warn(b"%s\n" % res2)
4032 4032
4033 4033 def _parsewirelangblocks(fh):
4034 4034 activeaction = None
4035 4035 blocklines = []
4036 4036 lastindent = 0
4037 4037
4038 4038 for line in fh:
4039 4039 line = line.rstrip()
4040 4040 if not line:
4041 4041 continue
4042 4042
4043 4043 if line.startswith(b'#'):
4044 4044 continue
4045 4045
4046 4046 if not line.startswith(b' '):
4047 4047 # New block. Flush previous one.
4048 4048 if activeaction:
4049 4049 yield activeaction, blocklines
4050 4050
4051 4051 activeaction = line
4052 4052 blocklines = []
4053 4053 lastindent = 0
4054 4054 continue
4055 4055
4056 4056 # Else we start with an indent.
4057 4057
4058 4058 if not activeaction:
4059 4059 raise error.Abort(_(b'indented line outside of block'))
4060 4060
4061 4061 indent = len(line) - len(line.lstrip())
4062 4062
4063 4063 # If this line is indented more than the last line, concatenate it.
4064 4064 if indent > lastindent and blocklines:
4065 4065 blocklines[-1] += line.lstrip()
4066 4066 else:
4067 4067 blocklines.append(line)
4068 4068 lastindent = indent
4069 4069
4070 4070 # Flush last block.
4071 4071 if activeaction:
4072 4072 yield activeaction, blocklines
4073 4073
4074 4074
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            # "http2" was missing from the hint even though it is accepted
            # by the check above.
            hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        # NOTE(review): this relies on --debug implying --verbose (so
        # 'loggingopts' exists); confirm that invariant holds for all
        # callers before touching the guard above.
        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                    ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                    ui.status(
                        _(b'remote output: %s\n') % stringutil.escapestr(output)
                    )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    # Fixed: the closing quote of the format example was
                    # missing from this message.
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # split() returns a list; the file path is its second
                    # element. Passing the whole list to open() (as the
                    # previous code did) raises a TypeError.
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now