##// END OF EJS Templates
debug: convert a few exceptions to bytes before wrapping in another error...
Matt Harbison -
r47516:8408c319 stable
parent child Browse files
Show More
@@ -1,4661 +1,4663
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import glob
15 15 import operator
16 16 import os
17 17 import platform
18 18 import random
19 19 import re
20 20 import socket
21 21 import ssl
22 22 import stat
23 23 import string
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullid,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 revlog,
73 73 revset,
74 74 revsetlang,
75 75 scmutil,
76 76 setdiscovery,
77 77 simplemerge,
78 78 sshpeer,
79 79 sslutil,
80 80 streamclone,
81 81 strip,
82 82 tags as tagsmod,
83 83 templater,
84 84 treediscovery,
85 85 upgrade,
86 86 url as urlmod,
87 87 util,
88 88 vfs as vfsmod,
89 89 wireprotoframing,
90 90 wireprotoserver,
91 91 wireprotov2peer,
92 92 )
93 93 from .utils import (
94 94 cborutil,
95 95 compression,
96 96 dateutil,
97 97 procutil,
98 98 stringutil,
99 99 )
100 100
101 101 from .revlogutils import (
102 102 deltas as deltautil,
103 103 nodemap,
104 104 sidedata,
105 105 )
106 106
107 107 release = lockmod.release
108 108
109 109 table = {}
110 110 table.update(strip.command._table)
111 111 command = registrar.command(table)
112 112
113 113
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = store.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancnode = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(ancnode), hex(ancnode)))
133 133
134 134
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths must be bytes: passing a native str here breaks on Python 3
    # where the vfs joins the name with its bytes base path.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
150 150
151 151
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
158 158
159 159
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # This command only makes sense on a pristine repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG text: count the node events so the progress
    # bar (and the mergeable-file line pool) can be sized up front.
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually build the commits, under locks and a single
    # transaction so a failure leaves no partial repository behind.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the last node committed (-1 before any commit)
        atbranch = b'default'  # branch applied to subsequently created nodes
        nodeids = []  # commit nodes indexed by DAG rev id, for backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                # p2 may be reused below by the new-file handling, so it is
                # initialized here regardless of the mergeable-file option.
                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the file content from
                        # both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # First commit: start from the pre-built line pool.
                        ml = initialmergedlines
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        # Merges must carry over the per-rev files of the
                        # second parent as well, or they would be removed.
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: provide content for the files we
                    # prepared above, and None (removal) for anything else.
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve DAG backrefs into actual parent nodes.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # ":tag" event — remember it for the localtags file.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # "@branch" event — affects all subsequently created nodes.
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        # Tags defined in the DAG text become local tags, not committed ones.
        repo.vfs.write(b"localtags", b"".join(tags))
335 335
336 336
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of a changegroup unbundler `gen`.

    With `all`, every delta of every chunk group (changelog, manifest and
    each filelog) is listed; otherwise only the changelog node hashes are
    printed.  `indent` prefixes each output line (used when nested inside
    a bundle2 listing).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one section header followed by one line per delta of
            # the chunk group the unbundler is currently positioned at.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # The unbundler is a sequential stream: each header call advances
        # it past the corresponding section, so the order below matters.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelogheader returns {} after the last filelog; iterate until then.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
376 376
377 377
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown on-disk format: report the raw version and size only.
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
        fm = ui.formatter(b'debugobsolete', opts)
        for raw in sorted(markers):
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, obsutil.marker(None, raw))
        fm.end()
400 400
401 401
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in binary phase data"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
410 410
411 411
def _quasirepr(thing):
    """render `thing` like repr(), but with deterministic dict ordering"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
418 418
419 419
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # An empty filter means "show everything".
        if wanted and part.type not in wanted:
            continue
        ui.write(
            (
                b'%s -- %s (mandatory: %r)\n'
                % (part.type, _quasirepr(part.params), part.mandatory)
            )
        )
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
442 442
443 443
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # Only the bundlespec was requested; skip the content listing.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
466 466
467 467
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b' %s\n' % cap)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for value in values:
                ui.write(b' %s\n' % value)
484 484
485 485
@command(b'debugchangedfiles', [], b'REV')
def debugchangedfiles(ui, repo, rev):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    sd = repo.changelog.sidedata(ctx.rev())
    files_block = sd.get(sidedata.SD_FILES)
    if files_block is None:
        # No files sidedata stored for this revision; nothing to show.
        return
    files = metadata.decode_files_sidedata(sd)
    for f in sorted(files.touched):
        # Categorize the touched file; order matters, first match wins.
        if f in files.added:
            action = b"added"
        elif f in files.removed:
            action = b"removed"
        elif f in files.merged:
            action = b"merged"
        elif f in files.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent, copy_source = b"p1", files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent, copy_source = b"p2", files.copied_from_p2[f]

        ui.write(
            b"%-8s %2s: %s, %s;\n" % (action, copy_parent, f, copy_source)
        )
518 518
519 519
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    nerrors = 0
    # Cross-check every tracked file against the parent manifests.
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            nerrors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            nerrors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            nerrors += 1
    # And the reverse direction: manifest entries must be tracked.
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            nerrors += 1
    if nerrors:
        raise error.Abort(
            _(b".hg/dirstate inconsistent with current parent's manifest")
        )
548 548
549 549
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
562 562
563 563
def _debugdisplaycolor(ui):
    """list every color label known to the (copied) ui"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # In terminfo mode, user-defined color/terminfo entries also count.
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.') :]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.') :]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    sortkey = lambda item: (b'_' in item[0], item[0], item[1])
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
580 580
581 581
def _debugdisplaystyle(ui):
    """list every configured style with its rendered effects"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad effect columns so they line up under the longest label.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
595 595
596 596
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        warning = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(warning)

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqstr = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqstr)
618 618
619 619
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Standalone revlog index given on the command line: emit its DAG.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) for every revision, plus an
            # ('l', ...) label event for each rev listed on the command line.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No index file: walk the repository changelog instead.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged rev to its tag names for label events below.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit an ('a', branch) event whenever the branch of the
                    # walked revision differs from the previous one.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    # Render the event stream as dagtext lines (the inverse of debugbuilddag).
    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
689 689
690 690
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    wantsinternal = (
        opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir')
    )
    if wantsinternal:
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        # With -c/-m/--dir the sole positional argument is the revision.
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
706 706
707 707
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matchfn = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matchfn(parsed[0]))
726 726
727 727
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Gather per-revision delta information from the raw index entry:
        # e[1] is the compressed size, e[2] the uncompressed size, e[3] the
        # delta base rev, e[5]/e[6] the parent revs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With generaldelta the base can be any rev; classify it.
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta a delta is always against the previous
            # rev, unless the rev is a full snapshot (its own base).
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # Sum the compressed sizes along the whole delta chain.
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # Chains are numbered by order of first appearance of their base rev.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # Distance in bytes from the chain base to the end of this rev,
        # including data from unrelated revs stored in between.
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # A chain of length one has no previous rev.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of this chain and accumulate the
            # on-disk block statistics.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
909 909
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is deprecated but still honored when given explicitly.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True

    if opts.get('datesort'):
        # sort by mtime, then by filename
        sortkey = lambda item: (item[1][3], item[0])
    else:
        sortkey = None  # plain filename order
    for path, ent in sorted(pycompat.iteritems(repo.dirstate), key=sortkey):
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, path))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
953 953
954 954
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        # NOTE(review): these two help strings are not wrapped in _() unlike
        # the options above — confirm whether that is intentional
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    Runs either the legacy tree-based discovery (--old) or the current
    set-based discovery against the given peer, then prints detailed
    statistics about common/missing heads and the initial undecided set.
    """
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    # `data` is filled by the discovery code via audit=data, and later
    # extended with the statistics computed below
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the full common set to its heads, as the new-style
            # discovery returns heads only
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    # time only the discovery run itself
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # common and missing must partition the repository
    assert len(common) + len(missing) == len(all)

    # the set discovery starts from: revisions not known to be on either side
    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    # display discovery summary
    ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"round-trips: %(total-roundtrips)9d\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b"  total common heads: %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(
        b"    also local heads: %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b"    also remote heads: %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(b"    both: %(nb-common-heads-both)9d\n" % data)
    ui.writenoi18n(b"  local heads: %(nb-head-local)9d\n" % data)
    ui.writenoi18n(
        b"    common: %(nb-common-heads-local)9d\n" % data
    )
    ui.writenoi18n(
        b"    missing: %(nb-head-local-missing)9d\n" % data
    )
    ui.writenoi18n(b"  remote heads: %(nb-head-remote)9d\n" % data)
    ui.writenoi18n(
        b"    common: %(nb-common-heads-remote)9d\n" % data
    )
    ui.writenoi18n(
        b"    unknown: %(nb-head-remote-unknown)9d\n" % data
    )
    ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
    ui.writenoi18n(b"  common: %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b"    heads: %(nb-common-heads)9d\n" % data)
    ui.writenoi18n(b"    roots: %(nb-common-roots)9d\n" % data)
    ui.writenoi18n(b"  missing: %(nb-revs-missing)9d\n" % data)
    ui.writenoi18n(b"    heads: %(nb-missing-heads)9d\n" % data)
    ui.writenoi18n(b"    roots: %(nb-missing-roots)9d\n" % data)
    ui.writenoi18n(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    ui.writenoi18n(b"    heads: %(nb-ini_und-heads)9d\n" % data)
    ui.writenoi18n(b"    roots: %(nb-ini_und-roots)9d\n" % data)
    ui.writenoi18n(b"    common: %(nb-ini_und-common)9d\n" % data)
    ui.writenoi18n(b"    missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
1113 1113
1114 1114
1115 1115 _chunksize = 4 << 10
1116 1116
1117 1117
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    Fetches ``url`` through Mercurial's url layer (so proxy/auth
    configuration applies) and streams the content in ``_chunksize``
    pieces either to the file named by --output or to the ui.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    try:
        # open the destination inside the try so a failure here still
        # closes the source handle
        if output:
            dest = open(output, b"wb", _chunksize)
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # close both ends; the source handle was previously leaked
        if dest is not ui:
            dest.close()
        fh.close()
1140 1140
1141 1141
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions

    For each loaded extension, reports its name and (with --verbose) its
    source location, whether it is bundled with Mercurial, the versions it
    was tested with, and its bug-report link.
    '''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) build: modules live inside the executable
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # plain mode: annotate the name with compatibility status
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1203 1203
1204 1204
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification

    Optionally prints the parse tree at each processing stage
    (--show-stage) and the resulting matcher (--show-matcher), then lists
    the candidate files matched by the expression.
    '''
    from . import fileset

    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # processing pipeline: each stage transforms the tree of the previous one
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect candidate files to run the matcher over
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1300 1300
1301 1301
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # pad the name column to the widest variant name
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' followed by padding so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # bytes values (detected via startswith) are printed as-is;
            # booleans become yes/no in plain output
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # color labels highlight mismatches between repo, config and default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1372 1372
1373 1373
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result as b'yes'/b'no'
        return b'yes' if flag else b'no'

    write = ui.writenoi18n
    write(b'path: %s\n' % path)
    write(b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)'))
    write(b'exec: %s\n' % yesno(util.checkexec(path)))
    write(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    write(b'symlink: %s\n' % yesno(util.checklink(path)))
    write(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probing case sensitivity needs a scratch file in the target path
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        # e.g. unwritable path: report unknown rather than fail
        pass
    write(b'case-sensitive: %s\n' % casesensitive)
1396 1396
1397 1397
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # build the getbundle() keyword arguments from the command line
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **kwargs)

    # map the user-visible compression name to the on-disk bundle type
    typemap = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = typemap.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1444 1444
1445 1445
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                # check the file itself first, then each parent directory:
                # a file can be ignored because a containing dir is ignored
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report which ignore-file rule matched
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1494 1494
1495 1495
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full node ids in debug mode, abbreviated ones otherwise
    shortfn = hex if ui.debugflag else short

    # width of a rendered node id: probe the first revision, default 12
    idlen = 12
    for rev in store:
        idlen = len(shortfn(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(p1))
        fm.write(b'p2', b'%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1535 1535
1536 1536
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # emit one parent->child edge per real parent; a nullid second
        # parent means there is no second parent
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1555 1555
1556 1556
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise the index so its internal counters are populated
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        # stats() only exists on the C/Rust index implementation
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1566 1566
1567 1567
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Checks encoding, Python, compiled modules, compression engines,
    templates, the commit editor and the configured username, counting
    each failed check as a problem.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # number of failed checks; doubles as the exit code
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (PyOxidizer) build: stdlib is embedded in the executable
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    # supported TLS protocol versions plus SNI availability
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b'  TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b'  SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # decide from the module policy which accelerated extensions to import
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # first word of the editor setting is the executable to look up
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let extensions contribute their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1866 1866
1867 1867
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # one character per queried id, in order: b'1' known, b'0' unknown
    ui.write(b"%s\n" % b"".join(b"1" if known else b"0" for known in flags))
1881 1881
1882 1882
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias kept so old completion scripts keep working; the real
    # implementation is debugnamecomplete()
    debugnamecomplete(ui, repo, *args)
1887 1887
1888 1888
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced freeing just deletes the lock file(s); no check is made
    # that the lock is actually stale (hence "DANGEROUS" in the help).
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                # wait=False: fail immediately instead of blocking
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # hold the acquired lock(s) until the user answers the prompt
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # No modifying option was given: fall through to reporting mode.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we were able to take the lock, so it was free; give it back
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the failed
                # acquire and the lstat -- treat it as free
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2000 2000
2001 2001
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Not every revlog implementation exposes a fulltext cache; abort
        # cleanly when this one does not.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    # wrap the lookup failure; the exception must be
                    # converted to bytes before embedding in Abort
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # No action requested: dump the cache contents, most recent first.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2073 2075
2074 2076
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # Report which on-disk record format (v1/v2) will be used.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable template; users may override with -T.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # "commits" section: the local and other heads of the merge.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files" section: per-file resolution state plus per-file extras.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level "extras": extras for files that are not in the mergestate.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2182 2184
2183 2185
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # branches are handled separately below, so that only *open*
    # branches end up in the completion list
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    prefixes = args or [b'']
    matches = {
        name
        for prefix in prefixes
        for name in candidates
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2206 2208
2207 2209
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdin'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdin')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Serialize a fresh nodemap from the in-memory index.
        index = repo.unfiltered().changelog.index
        if util.safehasattr(index, "nodemap_data_all"):
            blob = index.nodemap_data_all()
        else:
            blob = nodemap.persistent_data(index)
        ui.write(blob)
    elif opts['dump_disk']:
        # Dump the raw bytes currently persisted on disk.
        persisted = nodemap.persisted_data(repo.unfiltered().changelog)
        if persisted is not None:
            docket, blob = persisted
            ui.write(blob[:])
    elif opts['check']:
        # Validate the persisted data against the live index.
        changelog = repo.unfiltered().changelog
        persisted = nodemap.persisted_data(changelog)
        if persisted is not None:
            docket, blob = persisted
            return nodemap.check_data(ui, changelog.index, blob)
    elif opts['metadata']:
        # Show the docket (metadata header) of the persisted nodemap.
        persisted = nodemap.persisted_data(repo.unfiltered().changelog)
        if persisted is not None:
            docket, blob = persisted
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2269 2271
2270 2272
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into binary, rejecting anything that
        # is not exactly nodeid-length.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by index.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker precursor -> successors.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # Parent info can only be recorded for changesets we
                    # actually have locally.
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                # force the exception text to bytes before wrapping it
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # indices must be computed over *all* markers, even when only
            # a subset will be displayed
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2420 2422
2421 2423
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dst, src in copymap.items():
        ui.write(b'%s -> %s\n' % (src, dst))
2434 2436
2435 2437
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # BUG FIX: this function was named `debugp1copies`, shadowing the
    # module-level binding of the real debugp1copies defined just above.
    # The command registration (b'debugp2copies') is unaffected by the
    # rename, so callers of the *command* see no change.

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2448 2450
2449 2451
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, keeping only
        # dirstate entries whose status char is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # outside the repository: nothing to complete
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # dirstate paths always use '/'; convert OS separators for matching
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate status characters from the
    # -n/-a/-r flags; empty means "all of n, m, a, r".
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2518 2520
2519 2521
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    base = scmutil.revsingle(repo, rev1)
    target = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(base, pats, opts)
    copymap = copies.pathcopies(base, target, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2533 2535
2534 2536
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always enabled here; it only becomes
    # visible when --debug is passed.
    overrides = {(b'devel', b'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        is_local = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if is_local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
2553 2555
2554 2556
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        # --tool takes effect through the ui.forcemerge override
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # without --debug, swallow _picktool's chatter by
                # buffering output; the buffer is dropped in `finally`
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2642 2644
2643 2645
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        for key, value in sorted(pycompat.iteritems(peer.listkeys(namespace))):
            ui.write(
                b"%s\t%s\n"
                % (stringutil.escapestr(key), stringutil.escapestr(value))
            )
        return None

    # update mode: conditionally set key from old to new
    key, old, new = keyinfo
    with peer.commandexecutor() as executor:
        result = executor.callcommand(
            b'pushkey',
            {
                b'namespace': namespace,
                b'key': key,
                b'old': old,
                b'new': new,
            },
        ).result()

    ui.status(pycompat.bytestr(result) + b'\n')
    return not result
2675 2677
2676 2678
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare two revisions using parent vectors (pvec)

    Prints both vectors, their depths, and their relation
    (=, >, <, | for concurrent).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # BUG FIX: previously `rel` was left unbound when none of the
        # comparisons held, which would raise UnboundLocalError below.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2703 2705
2704 2706
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None means "rebuild everything"
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # files only in the manifest, or only in the dirstate
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            # keep deliberate adds ('a') untouched
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2752 2754
2753 2755
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Delegates to repair.rebuildfncache, which rewrites the store's
    # fncache from scratch.
    repair.rebuildfncache(ui, repo)
2758 2760
2759 2761
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        rename_info = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not rename_info:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            srcpath, srcnode = rename_info
            ui.write(
                _(b"%s renamed from %s:%s\n") % (relpath, srcpath, hex(srcnode))
            )
2779 2781
2780 2782
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2786 2788
2787 2789
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: print one raw index line per revision and return.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0  # running total of raw (uncompressed) sizes
        heads = set()  # current set of head revisions seen so far

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # no delta parent: the revision is its own delta base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # parents stop being heads once this revision is added
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # ratio of uncompressed data to on-disk data so far
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # low 16 bits of the version field are the revlog format number;
    # the high bits carry feature flags
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold `size` into a [min, max, total] accumulator in place
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # single pass over all revisions, classifying each delta and
    # accumulating size statistics
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full snapshot (or empty text)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # stored as a delta; extend the parent's chain data
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of the chunk identifies the compression engine
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # turn totals (index 2) into averages where a divisor exists
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # integer format sized to the widest expected value
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # integer-plus-percentage format sized to the widest expected value
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for the pcfmtstr formats
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # human-readable label for a one-byte compression-engine marker
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                # depth 0 snapshots are full revisions, reported above
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3142 3144
3143 3145
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    # only two layouts are supported: 0 (legacy) and 1 (with flags/size)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # full 40-char hashes with --debug, short hashes otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # derive the node-id column width from the first entry
        idlen = len(shortfn(r.node(i)))
        break

    # print the header matching the chosen format and verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # tolerate damaged entries; show null parents instead
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3257 3259
3258 3260
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # pipeline of (name, transform) pairs applied to the parse tree in order
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final optimization stage
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # decide which stages' trees get printed: showalways prints
    # unconditionally, showchanged only when the tree differs from the
    # last one printed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, keeping each stage's tree for later verification
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the
        # resulting revision lists; exit 1 on mismatch
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-diff-style comparison of the two rev lists
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # normal path: evaluate the final tree and print the result
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3390 3392
3391 3393
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    logfh = None
    if opts[b'logiofd']:
        # Line buffering would be ideal here, but binary-mode line
        # buffering is unsupported (and warns on Python 3.8+), so run
        # unbuffered; this path is not performance critical.
        fd = int(opts[b'logiofd'])
        try:
            logfh = os.fdopen(fd, 'ab', 0)
        except OSError as err:
            if err.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(fd, 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    server = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    server.serve_forever()
3440 3442
3441 3443
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    who deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # second parent defaults to the null revision when REV2 is omitted
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3469 3471
3470 3472
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir there is no FILE argument: the sole positional
    # argument is the revision, so shift it over.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # BUG FIX: error messages previously reported b'debugdata'
            # (copy-pasted from that command) instead of this command's name.
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # unwrap storage facades down to the underlying revlog when present
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # print entries sorted by key; dump values only with --verbose
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3497 3499
3498 3500
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        # without an explicit source, fall back to the repo's default path
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # NOTE(review): ssl.wrap_socket is deprecated (removed in Python 3.12);
    # an SSLContext-based replacement may be needed — confirm against the
    # project's supported Python range. Certificate verification is
    # intentionally disabled here: we only want the peer's cert bytes.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first probe without building; only hit Windows Update if incomplete
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3568 3570
3569 3571
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # collect all *.hg bundles under .hg/strip-backup, newest first
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # show up to `limit` changesets from chlist, honoring the
        # --newest-first and --no-merges log options
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # nothing to do if the requested changeset already exists
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # unreadable bundle (e.g. its parent was stripped); warn and skip
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # silence the incoming machinery while probing the bundle
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # --recover: unbundle the first bundle containing the node
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # listing mode: print the bundle's mtime header, then
                # its changesets (full path only with --verbose)
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3705 3707
3706 3708
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepo state of the given revision (or the working
    # directory parent when no revision is given): one path/source/revision
    # triple per subrepository, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % k)
        ui.writenoi18n(b' source %s\n' % v[0])
        ui.writenoi18n(b' revision %s\n' % v[1])
3718 3720
3719 3721
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # formatting helpers: whole-context repr vs short node hash
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        # one output line per successors set; divergent changesets yield
        # several sets, split ones yield several nodes within a set
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
3774 3776
3775 3777
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    for rev in repo:
        node = repo[rev].node()
        # computemissing=False: only report what the cache already has.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
        else:
            display = b'missing/invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3785 3787
3786 3788
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # The command is optionalrepo, but --rev only makes sense with one.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions as extra template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                # Empty key, or b'ui' (rejected here — presumably because
                # it would collide with the templater's own 'ui' resource;
                # verify against formatter.templateresources) — funnel both
                # into the Abort below, same as a missing '='.
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the parsed tree, and the alias-expanded tree if expansion
        # changed anything.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3850 3852
3851 3853
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may yield None; substitute a placeholder so the output
    # line below always formats cleanly.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
3866 3868
3867 3869
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
3880 3882
3881 3883
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks while caches are rebuilt.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
3887 3889
3888 3890
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations before delegating to the
    # upgrade machinery, which does all of the real work.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
3938 3940
3939 3941
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    matched = list(repo[None].walk(m))
    if not matched:
        return
    normalize = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        normalize = lambda fn: util.normpath(fn)
    # Column widths are sized to the longest repo-relative and cwd-relative
    # paths so the output lines up.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(path) for path in matched),
        max(len(repo.pathto(path)) for path in matched),
    )
    for path in matched:
        flag = b'exact' if m.exact(path) else b''
        line = fmt % (path, normalize(repo.pathto(path)), flag)
        ui.write(b"%s\n" % line.rstrip())
3966 3968
3967 3969
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Optional list of divergent changesets, rendered as
        # "<hex> (<phase>)" pairs followed by a trailing space.
        dnodes = b''
        divergent = entry.get(b'divergentnodes')
        if divergent:
            rendered = [
                b'%s (%s)' % (dctx.hex(), dctx.phasestr())
                for dctx in divergent
            ]
            dnodes = b' '.join(rendered) + b' '
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3985 3987
3986 3988
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # The generic remote options are consumed by hg.peer(); strip them so
    # only the command-specific, non-empty ones go over the wire.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = pycompat.strkwargs(
        {k: v for k, v in pycompat.iteritems(opts) if v}
    )
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % res1)
    if res1 != res2:
        ui.warn(b"%s\n" % res2)
4014 4016
4015 4017
4016 4018 def _parsewirelangblocks(fh):
4017 4019 activeaction = None
4018 4020 blocklines = []
4019 4021 lastindent = 0
4020 4022
4021 4023 for line in fh:
4022 4024 line = line.rstrip()
4023 4025 if not line:
4024 4026 continue
4025 4027
4026 4028 if line.startswith(b'#'):
4027 4029 continue
4028 4030
4029 4031 if not line.startswith(b' '):
4030 4032 # New block. Flush previous one.
4031 4033 if activeaction:
4032 4034 yield activeaction, blocklines
4033 4035
4034 4036 activeaction = line
4035 4037 blocklines = []
4036 4038 lastindent = 0
4037 4039 continue
4038 4040
4039 4041 # Else we start with an indent.
4040 4042
4041 4043 if not activeaction:
4042 4044 raise error.Abort(_(b'indented line outside of block'))
4043 4045
4044 4046 indent = len(line) - len(line.lstrip())
4045 4047
4046 4048 # If this line is indented more than the last line, concatenate it.
4047 4049 if indent > lastindent and blocklines:
4048 4050 blocklines[-1] += line.lstrip()
4049 4051 else:
4050 4052 blocklines.append(line)
4051 4053 lastindent = indent
4052 4054
4053 4055 # Flush last block.
4054 4056 if activeaction:
4055 4057 yield activeaction, blocklines
4056 4058
4057 4059
@command(
    b'debugwireproto',
    [
        (b'', b'localssh', False, _(b'start an SSH server for this repo')),
        (b'', b'peer', b'', _(b'construct a specific version of the peer')),
        (
            b'',
            b'noreadstderr',
            False,
            _(b'do not read from stderr of the remote'),
        ),
        (
            b'',
            b'nologhandshake',
            False,
            _(b'do not log I/O related to the peer handshake'),
        ),
    ]
    + cmdutil.remoteopts,
    _(b'[PATH]'),
    optionalrepo=True,
)
def debugwireproto(ui, repo, path=None, **opts):
    """send wire protocol commands to a server

    This command can be used to issue wire protocol commands to remote
    peers and to debug the raw data being exchanged.

    ``--localssh`` will start an SSH server against the current repository
    and connect to that. By default, the connection will perform a handshake
    and establish an appropriate peer instance.

    ``--peer`` can be used to bypass the handshake protocol and construct a
    peer instance using the specified class type. Valid values are ``raw``,
    ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
    raw data payloads and don't support higher-level command actions.

    ``--noreadstderr`` can be used to disable automatic reading from stderr
    of the peer (for SSH connections only). Disabling automatic reading of
    stderr is useful for making output more deterministic.

    Commands are issued via a mini language which is specified via stdin.
    The language consists of individual actions to perform. An action is
    defined by a block. A block is defined as a line with no leading
    space followed by 0 or more lines with leading space. Blocks are
    effectively a high-level command with additional metadata.

    Lines beginning with ``#`` are ignored.

    The following sections denote available actions.

    raw
    ---

    Send raw data to the server.

    The block payload contains the raw data to send as one atomic send
    operation. The data may not actually be delivered in a single system
    call: it depends on the abilities of the transport being used.

    Each line in the block is de-indented and concatenated. Then, that
    value is evaluated as a Python b'' literal. This allows the use of
    backslash escaping, etc.

    raw+
    ----

    Behaves like ``raw`` except flushes output afterwards.

    command <X>
    -----------

    Send a request to run a named command, whose name follows the ``command``
    string.

    Arguments to the command are defined as lines in this block. The format of
    each line is ``<key> <value>``. e.g.::

        command listkeys
            namespace bookmarks

    If the value begins with ``eval:``, it will be interpreted as a Python
    literal expression. Otherwise values are interpreted as Python b'' literals.
    This allows sending complex types and encoding special byte sequences via
    backslash escaping.

    The following arguments have special meaning:

    ``PUSHFILE``
        When defined, the *push* mechanism of the peer will be used instead
        of the static request-response mechanism and the content of the
        file specified in the value of this argument will be sent as the
        command payload.

        This can be used to submit a local bundle file to the remote.

    batchbegin
    ----------

    Instruct the peer to begin a batched send.

    All ``command`` blocks are queued for execution until the next
    ``batchsubmit`` block.

    batchsubmit
    -----------

    Submit previously queued ``command`` blocks as a batch request.

    This action MUST be paired with a ``batchbegin`` action.

    httprequest <method> <path>
    ---------------------------

    (HTTP peer only)

    Send an HTTP request to the peer.

    The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.

    Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
    headers to add to the request. e.g. ``Accept: foo``.

    The following arguments are special:

    ``BODYFILE``
        The content of the file defined as the value to this argument will be
        transferred verbatim as the HTTP request body.

    ``frame <type> <flags> <payload>``
        Send a unified protocol frame as part of the request body.

        All frames will be collected and sent as the body to the HTTP
        request.

    close
    -----

    Close the connection to the server.

    flush
    -----

    Flush data written to the server.

    readavailable
    -------------

    Close the write end of the connection and read all available data from
    the server.

    If the connection to the server encompasses multiple pipes, we poll both
    pipes and read available data.

    readline
    --------

    Read a line of output from the server. If there are multiple output
    pipes, reads only the main pipe.

    ereadline
    ---------

    Like ``readline``, but read from the stderr pipe, if available.

    read <X>
    --------

    ``read()`` N bytes from the server's main output pipe.

    eread <X>
    ---------

    ``read()`` N bytes from the server's stderr pipe, if available.

    Specifying Unified Frame-Based Protocol Frames
    ----------------------------------------------

    It is possible to emit a *Unified Frame-Based Protocol* by using special
    syntax.

    A frame is composed as a type, flags, and payload. These can be parsed
    from a string of the form:

       <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    ``request-id`` and ``stream-id`` are integers defining the request and
    stream identifiers.

    ``type`` can be an integer value for the frame type or the string name
    of the type. The strings are defined in ``wireprotoframing.py``. e.g.
    ``command-name``.

    ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
    components. Each component (and there can be just one) can be an integer
    or a flag name for stream flags or frame flags, respectively. Values are
    resolved to integers and then bitwise OR'd together.

    ``payload`` represents the raw frame payload. If it begins with
    ``cbor:``, the following string is evaluated as Python code and the
    resulting object is fed into a CBOR encoder. Otherwise it is interpreted
    as a Python byte string literal.
    """
    opts = pycompat.byteskwargs(opts)

    if opts[b'localssh'] and not repo:
        raise error.Abort(_(b'--localssh requires a repository'))

    if opts[b'peer'] and opts[b'peer'] not in (
        b'raw',
        b'http2',
        b'ssh1',
        b'ssh2',
    ):
        raise error.Abort(
            _(b'invalid value for --peer'),
            hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
        )

    if path and opts[b'localssh']:
        raise error.Abort(_(b'cannot specify --localssh with an explicit path'))

    if ui.interactive():
        ui.write(_(b'(waiting for commands on stdin)\n'))

    blocks = list(_parsewirelangblocks(ui.fin))

    proc = None
    stdin = None
    stdout = None
    stderr = None
    opener = None

    if opts[b'localssh']:
        # We start the SSH server in its own process so there is process
        # separation. This prevents a whole class of potential bugs around
        # shared state from interfering with server operation.
        args = procutil.hgcmd() + [
            b'-R',
            repo.root,
            b'debugserve',
            b'--sshstdio',
        ]
        proc = subprocess.Popen(
            pycompat.rapply(procutil.tonativestr, args),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=0,
        )

        stdin = proc.stdin
        stdout = proc.stdout
        stderr = proc.stderr

        # We turn the pipes into observers so we can log I/O.
        if ui.verbose or opts[b'peer'] == b'raw':
            stdin = util.makeloggingfileobject(
                ui, proc.stdin, b'i', logdata=True
            )
            stdout = util.makeloggingfileobject(
                ui, proc.stdout, b'o', logdata=True
            )
            stderr = util.makeloggingfileobject(
                ui, proc.stderr, b'e', logdata=True
            )

        # --localssh also implies the peer connection settings.

        url = b'ssh://localserver'
        autoreadstderr = not opts[b'noreadstderr']

        if opts[b'peer'] == b'ssh1':
            ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
            peer = sshpeer.sshv1peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'ssh2':
            ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
            peer = sshpeer.sshv2peer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                None,
                autoreadstderr=autoreadstderr,
            )
        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        else:
            ui.write(_(b'creating ssh peer from handshake results\n'))
            peer = sshpeer.makepeer(
                ui,
                url,
                proc,
                stdin,
                stdout,
                stderr,
                autoreadstderr=autoreadstderr,
            )

    elif path:
        # We bypass hg.peer() so we can proxy the sockets.
        # TODO consider not doing this because we skip
        # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
        u = util.url(path)
        if u.scheme != b'http':
            raise error.Abort(_(b'only http:// paths are currently supported'))

        url, authinfo = u.authinfo()
        openerargs = {
            'useragent': b'Mercurial debugwireproto',
        }

        # Turn pipes/sockets into observers so we can log I/O.
        if ui.verbose:
            openerargs.update(
                {
                    'loggingfh': ui,
                    'loggingname': b's',
                    'loggingopts': {
                        'logdata': True,
                        'logdataapis': False,
                    },
                }
            )

        if ui.debugflag:
            openerargs['loggingopts']['logdataapis'] = True

        # Don't send default headers when in raw mode. This allows us to
        # bypass most of the behavior of our URL handling code so we can
        # have near complete control over what's sent on the wire.
        if opts[b'peer'] == b'raw':
            openerargs['sendaccept'] = False

        opener = urlmod.opener(ui, authinfo, **openerargs)

        if opts[b'peer'] == b'http2':
            ui.write(_(b'creating http peer for wire protocol version 2\n'))
            # We go through makepeer() because we need an API descriptor for
            # the peer instance to be useful.
            with ui.configoverride(
                {(b'experimental', b'httppeer.advertise-v2'): True}
            ):
                if opts[b'nologhandshake']:
                    ui.pushbuffer()

                peer = httppeer.makepeer(ui, path, opener=opener)

                if opts[b'nologhandshake']:
                    ui.popbuffer()

            if not isinstance(peer, httppeer.httpv2peer):
                raise error.Abort(
                    _(
                        b'could not instantiate HTTP peer for '
                        b'wire protocol version 2'
                    ),
                    hint=_(
                        b'the server may not have the feature '
                        b'enabled or is not allowing this '
                        b'client version'
                    ),
                )

        elif opts[b'peer'] == b'raw':
            ui.write(_(b'using raw connection to peer\n'))
            peer = None
        elif opts[b'peer']:
            raise error.Abort(
                _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
            )
        else:
            peer = httppeer.makepeer(ui, path, opener=opener)

        # We /could/ populate stdin/stdout with sock.makefile()...
    else:
        raise error.Abort(_(b'unsupported connection configuration'))

    batchedcommands = None

    # Now perform actions based on the parsed wire language instructions.
    for action, lines in blocks:
        if action in (b'raw', b'raw+'):
            if not stdin:
                raise error.Abort(_(b'cannot call raw/raw+ on this peer'))

            # Concatenate the data together.
            data = b''.join(l.lstrip() for l in lines)
            data = stringutil.unescapestr(data)
            stdin.write(data)

            if action == b'raw+':
                stdin.flush()
        elif action == b'flush':
            if not stdin:
                raise error.Abort(_(b'cannot call flush on this peer'))
            stdin.flush()
        elif action.startswith(b'command'):
            if not peer:
                raise error.Abort(
                    _(
                        b'cannot send commands unless peer instance '
                        b'is available'
                    )
                )

            command = action.split(b' ', 1)[1]

            args = {}
            for line in lines:
                # We need to allow empty values.
                fields = line.lstrip().split(b' ', 1)
                if len(fields) == 1:
                    key = fields[0]
                    value = b''
                else:
                    key, value = fields

                if value.startswith(b'eval:'):
                    value = stringutil.evalpythonliteral(value[5:])
                else:
                    value = stringutil.unescapestr(value)

                args[key] = value

            if batchedcommands is not None:
                batchedcommands.append((command, args))
                continue

            ui.status(_(b'sending %s command\n') % command)

            if b'PUSHFILE' in args:
                with open(args[b'PUSHFILE'], 'rb') as fh:
                    del args[b'PUSHFILE']
                    res, output = peer._callpush(
                        command, fh, **pycompat.strkwargs(args)
                    )
                ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
                ui.status(
                    _(b'remote output: %s\n') % stringutil.escapestr(output)
                )
            else:
                with peer.commandexecutor() as e:
                    res = e.callcommand(command, args).result()

                if isinstance(res, wireprotov2peer.commandresponse):
                    val = res.objects()
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(val, bprefix=True, indent=2)
                    )
                else:
                    ui.status(
                        _(b'response: %s\n')
                        % stringutil.pprint(res, bprefix=True, indent=2)
                    )

        elif action == b'batchbegin':
            if batchedcommands is not None:
                raise error.Abort(_(b'nested batchbegin not allowed'))

            batchedcommands = []
        elif action == b'batchsubmit':
            # There is a batching API we could go through. But it would be
            # difficult to normalize requests into function calls. It is easier
            # to bypass this layer and normalize to commands + args.
            ui.status(
                _(b'sending batch with %d sub-commands\n')
                % len(batchedcommands)
            )
            assert peer is not None
            for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
                ui.status(
                    _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
                )

            batchedcommands = None

        elif action.startswith(b'httprequest '):
            if not opener:
                raise error.Abort(
                    _(b'cannot use httprequest without an HTTP peer')
                )

            request = action.split(b' ', 2)
            if len(request) != 3:
                raise error.Abort(
                    _(
                        b'invalid httprequest: expected format is '
                        b'"httprequest <method> <path>"'
                    )
                )

            method, httppath = request[1:]
            headers = {}
            body = None
            frames = []
            for line in lines:
                line = line.lstrip()
                m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
                if m:
                    # Headers need to use native strings.
                    key = pycompat.strurl(m.group(1))
                    value = pycompat.strurl(m.group(2))
                    headers[key] = value
                    continue

                if line.startswith(b'BODYFILE '):
                    # split() returns a list; the file name is the second
                    # field. (Previously the whole list was passed to
                    # open(), which raised TypeError.)
                    with open(line.split(b' ', 1)[1], b'rb') as fh:
                        body = fh.read()
                elif line.startswith(b'frame '):
                    frame = wireprotoframing.makeframefromhumanstring(
                        line[len(b'frame ') :]
                    )

                    frames.append(frame)
                else:
                    raise error.Abort(
                        _(b'unknown argument to httprequest: %s') % line
                    )

            url = path + httppath

            if frames:
                body = b''.join(bytes(f) for f in frames)

            req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)

            # urllib.Request insists on using has_data() as a proxy for
            # determining the request method. Override that to use our
            # explicitly requested method.
            req.get_method = lambda: pycompat.sysstr(method)

            try:
                res = opener.open(req)
                body = res.read()
            except util.urlerr.urlerror as e:
                # read() method must be called, but only exists in Python 2
                getattr(e, 'read', lambda: None)()
                continue

            ct = res.headers.get('Content-Type')
            if ct == 'application/mercurial-cbor':
                ui.write(
                    _(b'cbor> %s\n')
                    % stringutil.pprint(
                        cborutil.decodeall(body), bprefix=True, indent=2
                    )
                )

        elif action == b'close':
            assert peer is not None
            peer.close()
        elif action == b'readavailable':
            if not stdout or not stderr:
                raise error.Abort(
                    _(b'readavailable not available on this peer')
                )

            stdin.close()
            stdout.read()
            stderr.read()

        elif action == b'readline':
            if not stdout:
                raise error.Abort(_(b'readline not available on this peer'))
            stdout.readline()
        elif action == b'ereadline':
            if not stderr:
                raise error.Abort(_(b'ereadline not available on this peer'))
            stderr.readline()
        elif action.startswith(b'read '):
            count = int(action.split(b' ', 1)[1])
            if not stdout:
                raise error.Abort(_(b'read not available on this peer'))
            stdout.read(count)
        elif action.startswith(b'eread '):
            count = int(action.split(b' ', 1)[1])
            if not stderr:
                raise error.Abort(_(b'eread not available on this peer'))
            stderr.read(count)
        else:
            raise error.Abort(_(b'unknown action: %s') % action)

    if batchedcommands is not None:
        raise error.Abort(_(b'unclosed "batchbegin" request'))

    if peer:
        peer.close()

    if proc:
        proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now