##// END OF EJS Templates
debugdiscovery: document relevant config option...
marmoute -
r47560:13d97369 default
parent child Browse files
Show More
@@ -1,4781 +1,4810 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullid,
34 34 nullrev,
35 35 short,
36 36 )
37 37 from .pycompat import (
38 38 getattr,
39 39 open,
40 40 )
41 41 from . import (
42 42 bundle2,
43 43 bundlerepo,
44 44 changegroup,
45 45 cmdutil,
46 46 color,
47 47 context,
48 48 copies,
49 49 dagparser,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 revlog,
75 75 revset,
76 76 revsetlang,
77 77 scmutil,
78 78 setdiscovery,
79 79 simplemerge,
80 80 sshpeer,
81 81 sslutil,
82 82 streamclone,
83 83 strip,
84 84 tags as tagsmod,
85 85 templater,
86 86 treediscovery,
87 87 upgrade,
88 88 url as urlmod,
89 89 util,
90 90 vfs as vfsmod,
91 91 wireprotoframing,
92 92 wireprotoserver,
93 93 wireprotov2peer,
94 94 )
95 95 from .utils import (
96 96 cborutil,
97 97 compression,
98 98 dateutil,
99 99 procutil,
100 100 stringutil,
101 101 )
102 102
103 103 from .revlogutils import (
104 104 deltas as deltautil,
105 105 nodemap,
106 106 sidedata,
107 107 )
108 108
109 109 release = lockmod.release
110 110
111 111 table = {}
112 112 table.update(strip.command._table)
113 113 command = registrar.command(table)
114 114
115 115
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index

    With three arguments, the first names a revlog index file which is
    opened directly (no repository needed).  With two arguments, the
    current repository's changelog is used, so a repository is required.
    Prints the ancestor as "<revnum>:<hexnode>".
    """
    if len(args) == 3:
        # explicit index file: open the revlog relative to the cwd,
        # bypassing path auditing since this is a debug command
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
135 135
136 136
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active

    Writes the harmless EICAR test signature into the repository cache
    directory, waits briefly so an on-access scanner can react, then
    removes the file again.
    """
    # vfs paths are bytes throughout this file; the original passed str
    # literals here, which is inconsistent and risks mixing str and bytes
    # in the underlying os.path.join on Python 3.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
152 152
153 153
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Use a context manager so the bundle file handle is reliably closed
    # on both success and error paths, matching how debugbundle handles
    # hg.openpath (the original leaked the handle).
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
160 160
161 161
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, used only to
    # size the progress bar and the initial mergeable-file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        # second parse pass: actually create one commit per 'n' event
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge the "mf" file contents of
                        # both parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's line so every revision changes "mf"
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry p2's per-rev files over so the merge
                        # commit keeps them
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # file context factory handed to memctx; returns None
                    # for paths this commit does not provide
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # map dagparser backrefs to the node ids created so far
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag event: remember it for the localtags file
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write(b"localtags", b"".join(tags))
337 337
338 338
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup `gen` to the ui

    With `all` set, one line per delta is printed for the changelog, the
    manifest and every filelog section; otherwise only the changelog
    node hashes are printed.  `indent` prefixes every output line (used
    when nesting this output inside bundle2 part listings).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print a section header followed by one line per delta
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelog sections follow until filelogheader() returns the empty
        # sentinel dict
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
378 378
379 379
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and obsolescence markers contained in `part`

    Reads the bundle2 part payload, decodes it as an obsmarker blob and
    prints one formatted marker per line, each prefixed by `indent`
    spaces.
    """
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # the payload uses a marker format this client cannot decode;
        # report it instead of aborting so the rest of the bundle can
        # still be inspected
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
402 402
403 403
def _debugphaseheads(ui, data, indent=0):
    """decode a phase-heads blob and print one "<node> <phase>" line per head"""
    prefix = b' ' * indent
    decoded = phases.binarydecode(data)
    for phasenum in phases.allphases:
        phasename = phases.phasenames[phasenum]
        for node in decoded[phasenum]:
            ui.write(prefix + b'%s %s\n' % (hex(node), phasename))
412 412
413 413
def _quasirepr(thing):
    """Return a repr-like bytestring for `thing`.

    Mapping types are rendered with their keys in sorted order so the
    output is deterministic; everything else falls back to repr().
    """
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mapping_types):
        return pycompat.bytestr(repr(thing))
    pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(pairs)
420 420
421 421
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2

    Prints the stream parameters, then one header line per part
    (optionally filtered by --part-type).  Known part payloads
    (changegroup, obsmarkers, phase-heads) are expanded inline unless
    the ui is quiet.
    """
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # --part-type may be given several times; an empty list means "all"
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # the changegroup wire version defaults to '01' when the part
            # does not carry an explicit 'version' parameter
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
444 444
445 445
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only report the bundle's bundlespec string
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            # bundle2 gets its own, richer dump routine
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
468 468
469 469
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # query the peer before printing anything, so a failing peer
        # produces no partial output
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        peer.close()
489 489
490 490
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision

    For every touched file, prints its category (added/removed/merged/
    salvaged/touched) and, when the file was copied, which parent it was
    copied from and the source path.
    """
    ctx = scmutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # recompute from the changeset itself instead of trusting storage
        files = metadata.compute_all_files_changes(ctx)
    else:
        # read the pre-computed block from changelog sidedata; files stays
        # None (and nothing is printed) when no sidedata block is stored
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # category order matters: the first matching set wins
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
540 540
541 541
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    # mutable cell so the nested helper can count inconsistencies
    errorcount = [0]

    def complain(msg):
        # report one inconsistency and remember that we saw it
        ui.warn(msg)
        errorcount[0] += 1

    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            complain(_(b"%s in state %s, but not in manifest1\n") % (f, state))
        if state in b"a" and f in m1:
            complain(_(b"%s in state %s, but also in manifest1\n") % (f, state))
        if state in b"m" and f not in m1 and f not in m2:
            complain(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            complain(_(b"%s in manifest1, but listed as state %s") % (f, state))
    if errorcount[0]:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
570 570
571 571
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured styles; otherwise list raw color names
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
584 584
585 585
def _debugdisplaycolor(ui):
    """print every color/effect name known to the active color mode,
    rendering each line in its own color"""
    ui = ui.copy()
    ui._styles.clear()
    # seed the style table with the effects active for this mode
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose user-configured color/terminfo keys
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.'):]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    def sortkey(item):
        # sort label with a '_' after the other to group '_background' entry.
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
602 602
603 603
def _debugdisplaystyle(ui):
    """print each configured style label with its effects, column-aligned"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels so every effect list starts in the same column
    width = max(len(label) for label in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            padding = b' ' * (max(0, width - len(label)))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b': ' + padding + b', '.join(rendered))
        ui.write(b'\n')
617 617
618 618
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        warning = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(warning)

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
640 640
641 641
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog index file: emit its DAG, labelling the
        # explicitly listed revisions as "rN"
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) node events, plus ('l', ...)
            # label events for the requested revs
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names, so node events can be labelled
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event whenever the named
                    # branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
711 711
712 712
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    storage_flags = (b'changelog', b'manifest', b'dir')
    if any(opts.get(flag) for flag in storage_flags):
        # with -c/-m/--dir the positional FILE argument is really the REV
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
728 728
729 729
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        # also try the extended (human-friendly) format list
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
748 748
749 749
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
        (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
        of this revision
    :``extradist``: total size of revisions not part of this delta chain from
        base of delta chain to end of this revision; a measurement
        of how much extra data we need to read/seek across to read
        the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
        how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
        (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Index entry fields used below (as exercised by this code):
        # e[1] = compressed size, e[2] = uncompressed size,
        # e[3] = delta base revision, e[5]/e[6] = parent revisions.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # classify the delta by which revision its base is
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, a revision is either a full snapshot
            # (base == itself) or a delta against the previous revision
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chains are numbered consecutively by their (unique) base revision
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sliced read the revlog would perform for this
            # chain and accumulate the per-slice statistics
            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
930 930
931 931
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # the deprecated --nodates flag forces dates off regardless of --dates
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    # each dirstate entry is a (state, mode, size, mtime) tuple, as used
    # by the indexing below
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink bit recorded in the mode field
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
975 975
976 976
977 977 @command(
978 978 b'debugdiscovery',
979 979 [
980 980 (b'', b'old', None, _(b'use old-style discovery')),
981 981 (
982 982 b'',
983 983 b'nonheads',
984 984 None,
985 985 _(b'use old-style discovery with non-heads included'),
986 986 ),
987 987 (b'', b'rev', [], b'restrict discovery to this set of revs'),
988 988 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
989 989 (
990 990 b'',
991 991 b'local-as-revs',
992 992 b"",
993 993 b'treat local has having these revisions only',
994 994 ),
995 995 (
996 996 b'',
997 997 b'remote-as-revs',
998 998 b"",
999 999 b'use local as remote, with only these these revisions',
1000 1000 ),
1001 1001 ]
1002 1002 + cmdutil.remoteopts
1003 1003 + cmdutil.formatteropts,
1004 1004 _(b'[--rev REV] [OTHER]'),
1005 1005 )
1006 1006 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1007 1007 """runs the changeset discovery protocol in isolation
1008 1008
1009 1009 The local peer can be "replaced" by a subset of the local repository by
1010 1010 using the `--local-as-revs` flag. In the same way, the usual `remote` peer can
1011 1011 be "replaced" by a subset of the local repository using the
1012 1012 `--local-as-revs` flag. This is useful to efficiently debug pathological
1013 1013 discovery situation.
1014
1015 The following developer-oriented config options are relevant to people playing with this command:
1016
1017 * devel.discovery.exchange-heads=True
1018
1019 If False, the discovery will not start with
1020 remote head fetching and local head querying.
1021
1022 * devel.discovery.grow-sample=True
1023
1024 If False, the sample size used in set discovery will not be increased
1025 through the process
1026
1027 * devel.discovery.grow-sample.rate=1.05
1028
1029 The rate at which the sample grows
1030
1031 * devel.discovery.randomize=True
1032
1033 If False, random sampling during discovery is deterministic. It is meant
1034 for integration tests.
1035
1036 * devel.discovery.sample-size=200
1037
1038 Control the initial size of the discovery sample
1039
1040 * devel.discovery.sample-size.initial=100
1041
1042 Control the size of the sample used in the first discovery round
1014 1043 """
1015 1044 opts = pycompat.byteskwargs(opts)
1016 1045 unfi = repo.unfiltered()
1017 1046
1018 1047 # setup potential extra filtering
1019 1048 local_revs = opts[b"local_as_revs"]
1020 1049 remote_revs = opts[b"remote_as_revs"]
1021 1050
1022 1051 # make sure tests are repeatable
1023 1052 random.seed(int(opts[b'seed']))
1024 1053
1025 1054 if not remote_revs:
1026 1055
1027 1056 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
1028 1057 remote = hg.peer(repo, opts, remoteurl)
1029 1058 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
1030 1059 else:
1031 1060 branches = (None, [])
1032 1061 remote_filtered_revs = scmutil.revrange(
1033 1062 unfi, [b"not (::(%s))" % remote_revs]
1034 1063 )
1035 1064 remote_filtered_revs = frozenset(remote_filtered_revs)
1036 1065
1037 1066 def remote_func(x):
1038 1067 return remote_filtered_revs
1039 1068
1040 1069 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1041 1070
1042 1071 remote = repo.peer()
1043 1072 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1044 1073
1045 1074 if local_revs:
1046 1075 local_filtered_revs = scmutil.revrange(
1047 1076 unfi, [b"not (::(%s))" % local_revs]
1048 1077 )
1049 1078 local_filtered_revs = frozenset(local_filtered_revs)
1050 1079
1051 1080 def local_func(x):
1052 1081 return local_filtered_revs
1053 1082
1054 1083 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1055 1084 repo = repo.filtered(b'debug-discovery-local-filter')
1056 1085
1057 1086 data = {}
1058 1087 if opts.get(b'old'):
1059 1088
1060 1089 def doit(pushedrevs, remoteheads, remote=remote):
1061 1090 if not util.safehasattr(remote, b'branches'):
1062 1091 # enable in-client legacy support
1063 1092 remote = localrepo.locallegacypeer(remote.local())
1064 1093 common, _in, hds = treediscovery.findcommonincoming(
1065 1094 repo, remote, force=True, audit=data
1066 1095 )
1067 1096 common = set(common)
1068 1097 if not opts.get(b'nonheads'):
1069 1098 ui.writenoi18n(
1070 1099 b"unpruned common: %s\n"
1071 1100 % b" ".join(sorted(short(n) for n in common))
1072 1101 )
1073 1102
1074 1103 clnode = repo.changelog.node
1075 1104 common = repo.revs(b'heads(::%ln)', common)
1076 1105 common = {clnode(r) for r in common}
1077 1106 return common, hds
1078 1107
1079 1108 else:
1080 1109
1081 1110 def doit(pushedrevs, remoteheads, remote=remote):
1082 1111 nodes = None
1083 1112 if pushedrevs:
1084 1113 revs = scmutil.revrange(repo, pushedrevs)
1085 1114 nodes = [repo[r].node() for r in revs]
1086 1115 common, any, hds = setdiscovery.findcommonheads(
1087 1116 ui, repo, remote, ancestorsof=nodes, audit=data
1088 1117 )
1089 1118 return common, hds
1090 1119
1091 1120 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1092 1121 localrevs = opts[b'rev']
1093 1122
1094 1123 fm = ui.formatter(b'debugdiscovery', opts)
1095 1124 if fm.strict_format:
1096 1125
1097 1126 @contextlib.contextmanager
1098 1127 def may_capture_output():
1099 1128 ui.pushbuffer()
1100 1129 yield
1101 1130 data[b'output'] = ui.popbuffer()
1102 1131
1103 1132 else:
1104 1133 may_capture_output = util.nullcontextmanager
1105 1134 with may_capture_output():
1106 1135 with util.timedcm('debug-discovery') as t:
1107 1136 common, hds = doit(localrevs, remoterevs)
1108 1137
1109 1138 # compute all statistics
1110 1139 heads_common = set(common)
1111 1140 heads_remote = set(hds)
1112 1141 heads_local = set(repo.heads())
1113 1142 # note: they cannot be a local or remote head that is in common and not
1114 1143 # itself a head of common.
1115 1144 heads_common_local = heads_common & heads_local
1116 1145 heads_common_remote = heads_common & heads_remote
1117 1146 heads_common_both = heads_common & heads_remote & heads_local
1118 1147
1119 1148 all = repo.revs(b'all()')
1120 1149 common = repo.revs(b'::%ln', common)
1121 1150 roots_common = repo.revs(b'roots(::%ld)', common)
1122 1151 missing = repo.revs(b'not ::%ld', common)
1123 1152 heads_missing = repo.revs(b'heads(%ld)', missing)
1124 1153 roots_missing = repo.revs(b'roots(%ld)', missing)
1125 1154 assert len(common) + len(missing) == len(all)
1126 1155
1127 1156 initial_undecided = repo.revs(
1128 1157 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1129 1158 )
1130 1159 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1131 1160 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1132 1161 common_initial_undecided = initial_undecided & common
1133 1162 missing_initial_undecided = initial_undecided & missing
1134 1163
1135 1164 data[b'elapsed'] = t.elapsed
1136 1165 data[b'nb-common-heads'] = len(heads_common)
1137 1166 data[b'nb-common-heads-local'] = len(heads_common_local)
1138 1167 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1139 1168 data[b'nb-common-heads-both'] = len(heads_common_both)
1140 1169 data[b'nb-common-roots'] = len(roots_common)
1141 1170 data[b'nb-head-local'] = len(heads_local)
1142 1171 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1143 1172 data[b'nb-head-remote'] = len(heads_remote)
1144 1173 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1145 1174 heads_common_remote
1146 1175 )
1147 1176 data[b'nb-revs'] = len(all)
1148 1177 data[b'nb-revs-common'] = len(common)
1149 1178 data[b'nb-revs-missing'] = len(missing)
1150 1179 data[b'nb-missing-heads'] = len(heads_missing)
1151 1180 data[b'nb-missing-roots'] = len(roots_missing)
1152 1181 data[b'nb-ini_und'] = len(initial_undecided)
1153 1182 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1154 1183 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1155 1184 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1156 1185 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1157 1186
1158 1187 fm.startitem()
1159 1188 fm.data(**pycompat.strkwargs(data))
1160 1189 # display discovery summary
1161 1190 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1162 1191 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1163 1192 fm.plain(b"heads summary:\n")
1164 1193 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1165 1194 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1166 1195 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1167 1196 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1168 1197 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1169 1198 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1170 1199 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1171 1200 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1172 1201 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1173 1202 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1174 1203 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1175 1204 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1176 1205 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1177 1206 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1178 1207 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1179 1208 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1180 1209 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1181 1210 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1182 1211 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1183 1212 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1184 1213 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1185 1214 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1186 1215
1187 1216 if ui.verbose:
1188 1217 fm.plain(
1189 1218 b"common heads: %s\n"
1190 1219 % b" ".join(sorted(short(n) for n in heads_common))
1191 1220 )
1192 1221 fm.end()
1193 1222
1194 1223
1195 1224 _chunksize = 4 << 10
1196 1225
1197 1226
1198 1227 @command(
1199 1228 b'debugdownload',
1200 1229 [
1201 1230 (b'o', b'output', b'', _(b'path')),
1202 1231 ],
1203 1232 optionalrepo=True,
1204 1233 )
1205 1234 def debugdownload(ui, repo, url, output=None, **opts):
1206 1235 """download a resource using Mercurial logic and config"""
1207 1236 fh = urlmod.open(ui, url, output)
1208 1237
1209 1238 dest = ui
1210 1239 if output:
1211 1240 dest = open(output, b"wb", _chunksize)
1212 1241 try:
1213 1242 data = fh.read(_chunksize)
1214 1243 while data:
1215 1244 dest.write(data)
1216 1245 data = fh.read(_chunksize)
1217 1246 finally:
1218 1247 if output:
1219 1248 dest.close()
1220 1249
1221 1250
1222 1251 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1223 1252 def debugextensions(ui, repo, **opts):
1224 1253 '''show information about active extensions'''
1225 1254 opts = pycompat.byteskwargs(opts)
1226 1255 exts = extensions.extensions(ui)
1227 1256 hgver = util.version()
1228 1257 fm = ui.formatter(b'debugextensions', opts)
1229 1258 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1230 1259 isinternal = extensions.ismoduleinternal(extmod)
1231 1260 extsource = None
1232 1261
1233 1262 if util.safehasattr(extmod, '__file__'):
1234 1263 extsource = pycompat.fsencode(extmod.__file__)
1235 1264 elif getattr(sys, 'oxidized', False):
1236 1265 extsource = pycompat.sysexecutable
1237 1266 if isinternal:
1238 1267 exttestedwith = [] # never expose magic string to users
1239 1268 else:
1240 1269 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1241 1270 extbuglink = getattr(extmod, 'buglink', None)
1242 1271
1243 1272 fm.startitem()
1244 1273
1245 1274 if ui.quiet or ui.verbose:
1246 1275 fm.write(b'name', b'%s\n', extname)
1247 1276 else:
1248 1277 fm.write(b'name', b'%s', extname)
1249 1278 if isinternal or hgver in exttestedwith:
1250 1279 fm.plain(b'\n')
1251 1280 elif not exttestedwith:
1252 1281 fm.plain(_(b' (untested!)\n'))
1253 1282 else:
1254 1283 lasttestedversion = exttestedwith[-1]
1255 1284 fm.plain(b' (%s!)\n' % lasttestedversion)
1256 1285
1257 1286 fm.condwrite(
1258 1287 ui.verbose and extsource,
1259 1288 b'source',
1260 1289 _(b' location: %s\n'),
1261 1290 extsource or b"",
1262 1291 )
1263 1292
1264 1293 if ui.verbose:
1265 1294 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1266 1295 fm.data(bundled=isinternal)
1267 1296
1268 1297 fm.condwrite(
1269 1298 ui.verbose and exttestedwith,
1270 1299 b'testedwith',
1271 1300 _(b' tested with: %s\n'),
1272 1301 fm.formatlist(exttestedwith, name=b'ver'),
1273 1302 )
1274 1303
1275 1304 fm.condwrite(
1276 1305 ui.verbose and extbuglink,
1277 1306 b'buglink',
1278 1307 _(b' bug reporting: %s\n'),
1279 1308 extbuglink or b"",
1280 1309 )
1281 1310
1282 1311 fm.end()
1283 1312
1284 1313
1285 1314 @command(
1286 1315 b'debugfileset',
1287 1316 [
1288 1317 (
1289 1318 b'r',
1290 1319 b'rev',
1291 1320 b'',
1292 1321 _(b'apply the filespec on this revision'),
1293 1322 _(b'REV'),
1294 1323 ),
1295 1324 (
1296 1325 b'',
1297 1326 b'all-files',
1298 1327 False,
1299 1328 _(b'test files from all revisions and working directory'),
1300 1329 ),
1301 1330 (
1302 1331 b's',
1303 1332 b'show-matcher',
1304 1333 None,
1305 1334 _(b'print internal representation of matcher'),
1306 1335 ),
1307 1336 (
1308 1337 b'p',
1309 1338 b'show-stage',
1310 1339 [],
1311 1340 _(b'print parsed tree at the given stage'),
1312 1341 _(b'NAME'),
1313 1342 ),
1314 1343 ],
1315 1344 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1316 1345 )
1317 1346 def debugfileset(ui, repo, expr, **opts):
1318 1347 '''parse and apply a fileset specification'''
1319 1348 from . import fileset
1320 1349
1321 1350 fileset.symbols # force import of fileset so we have predicates to optimize
1322 1351 opts = pycompat.byteskwargs(opts)
1323 1352 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1324 1353
1325 1354 stages = [
1326 1355 (b'parsed', pycompat.identity),
1327 1356 (b'analyzed', filesetlang.analyze),
1328 1357 (b'optimized', filesetlang.optimize),
1329 1358 ]
1330 1359 stagenames = {n for n, f in stages}
1331 1360
1332 1361 showalways = set()
1333 1362 if ui.verbose and not opts[b'show_stage']:
1334 1363 # show parsed tree by --verbose (deprecated)
1335 1364 showalways.add(b'parsed')
1336 1365 if opts[b'show_stage'] == [b'all']:
1337 1366 showalways.update(stagenames)
1338 1367 else:
1339 1368 for n in opts[b'show_stage']:
1340 1369 if n not in stagenames:
1341 1370 raise error.Abort(_(b'invalid stage name: %s') % n)
1342 1371 showalways.update(opts[b'show_stage'])
1343 1372
1344 1373 tree = filesetlang.parse(expr)
1345 1374 for n, f in stages:
1346 1375 tree = f(tree)
1347 1376 if n in showalways:
1348 1377 if opts[b'show_stage'] or n != b'parsed':
1349 1378 ui.write(b"* %s:\n" % n)
1350 1379 ui.write(filesetlang.prettyformat(tree), b"\n")
1351 1380
1352 1381 files = set()
1353 1382 if opts[b'all_files']:
1354 1383 for r in repo:
1355 1384 c = repo[r]
1356 1385 files.update(c.files())
1357 1386 files.update(c.substate)
1358 1387 if opts[b'all_files'] or ctx.rev() is None:
1359 1388 wctx = repo[None]
1360 1389 files.update(
1361 1390 repo.dirstate.walk(
1362 1391 scmutil.matchall(repo),
1363 1392 subrepos=list(wctx.substate),
1364 1393 unknown=True,
1365 1394 ignored=True,
1366 1395 )
1367 1396 )
1368 1397 files.update(wctx.substate)
1369 1398 else:
1370 1399 files.update(ctx.files())
1371 1400 files.update(ctx.substate)
1372 1401
1373 1402 m = ctx.matchfileset(repo.getcwd(), expr)
1374 1403 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1375 1404 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1376 1405 for f in sorted(files):
1377 1406 if not m(f):
1378 1407 continue
1379 1408 ui.write(b"%s\n" % f)
1380 1409
1381 1410
1382 1411 @command(b'debugformat', [] + cmdutil.formatteropts)
1383 1412 def debugformat(ui, repo, **opts):
1384 1413 """display format information about the current repository
1385 1414
1386 1415 Use --verbose to get extra information about current config value and
1387 1416 Mercurial default."""
1388 1417 opts = pycompat.byteskwargs(opts)
1389 1418 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1390 1419 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1391 1420
1392 1421 def makeformatname(name):
1393 1422 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1394 1423
1395 1424 fm = ui.formatter(b'debugformat', opts)
1396 1425 if fm.isplain():
1397 1426
1398 1427 def formatvalue(value):
1399 1428 if util.safehasattr(value, b'startswith'):
1400 1429 return value
1401 1430 if value:
1402 1431 return b'yes'
1403 1432 else:
1404 1433 return b'no'
1405 1434
1406 1435 else:
1407 1436 formatvalue = pycompat.identity
1408 1437
1409 1438 fm.plain(b'format-variant')
1410 1439 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1411 1440 fm.plain(b' repo')
1412 1441 if ui.verbose:
1413 1442 fm.plain(b' config default')
1414 1443 fm.plain(b'\n')
1415 1444 for fv in upgrade.allformatvariant:
1416 1445 fm.startitem()
1417 1446 repovalue = fv.fromrepo(repo)
1418 1447 configvalue = fv.fromconfig(repo)
1419 1448
1420 1449 if repovalue != configvalue:
1421 1450 namelabel = b'formatvariant.name.mismatchconfig'
1422 1451 repolabel = b'formatvariant.repo.mismatchconfig'
1423 1452 elif repovalue != fv.default:
1424 1453 namelabel = b'formatvariant.name.mismatchdefault'
1425 1454 repolabel = b'formatvariant.repo.mismatchdefault'
1426 1455 else:
1427 1456 namelabel = b'formatvariant.name.uptodate'
1428 1457 repolabel = b'formatvariant.repo.uptodate'
1429 1458
1430 1459 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1431 1460 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1432 1461 if fv.default != configvalue:
1433 1462 configlabel = b'formatvariant.config.special'
1434 1463 else:
1435 1464 configlabel = b'formatvariant.config.default'
1436 1465 fm.condwrite(
1437 1466 ui.verbose,
1438 1467 b'config',
1439 1468 b' %6s',
1440 1469 formatvalue(configvalue),
1441 1470 label=configlabel,
1442 1471 )
1443 1472 fm.condwrite(
1444 1473 ui.verbose,
1445 1474 b'default',
1446 1475 b' %7s',
1447 1476 formatvalue(fv.default),
1448 1477 label=b'formatvariant.default',
1449 1478 )
1450 1479 fm.plain(b'\n')
1451 1480 fm.end()
1452 1481
1453 1482
1454 1483 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1455 1484 def debugfsinfo(ui, path=b"."):
1456 1485 """show information detected about current filesystem"""
1457 1486 ui.writenoi18n(b'path: %s\n' % path)
1458 1487 ui.writenoi18n(
1459 1488 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1460 1489 )
1461 1490 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1462 1491 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1463 1492 ui.writenoi18n(
1464 1493 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1465 1494 )
1466 1495 ui.writenoi18n(
1467 1496 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1468 1497 )
1469 1498 casesensitive = b'(unknown)'
1470 1499 try:
1471 1500 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1472 1501 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1473 1502 except OSError:
1474 1503 pass
1475 1504 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1476 1505
1477 1506
1478 1507 @command(
1479 1508 b'debuggetbundle',
1480 1509 [
1481 1510 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1482 1511 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1483 1512 (
1484 1513 b't',
1485 1514 b'type',
1486 1515 b'bzip2',
1487 1516 _(b'bundle compression type to use'),
1488 1517 _(b'TYPE'),
1489 1518 ),
1490 1519 ],
1491 1520 _(b'REPO FILE [-H|-C ID]...'),
1492 1521 norepo=True,
1493 1522 )
1494 1523 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1495 1524 """retrieves a bundle from a repo
1496 1525
1497 1526 Every ID must be a full-length hex node id string. Saves the bundle to the
1498 1527 given file.
1499 1528 """
1500 1529 opts = pycompat.byteskwargs(opts)
1501 1530 repo = hg.peer(ui, opts, repopath)
1502 1531 if not repo.capable(b'getbundle'):
1503 1532 raise error.Abort(b"getbundle() not supported by target repository")
1504 1533 args = {}
1505 1534 if common:
1506 1535 args['common'] = [bin(s) for s in common]
1507 1536 if head:
1508 1537 args['heads'] = [bin(s) for s in head]
1509 1538 # TODO: get desired bundlecaps from command line.
1510 1539 args['bundlecaps'] = None
1511 1540 bundle = repo.getbundle(b'debug', **args)
1512 1541
1513 1542 bundletype = opts.get(b'type', b'bzip2').lower()
1514 1543 btypes = {
1515 1544 b'none': b'HG10UN',
1516 1545 b'bzip2': b'HG10BZ',
1517 1546 b'gzip': b'HG10GZ',
1518 1547 b'bundle2': b'HG20',
1519 1548 }
1520 1549 bundletype = btypes.get(bundletype)
1521 1550 if bundletype not in bundle2.bundletypes:
1522 1551 raise error.Abort(_(b'unknown bundle type specified with --type'))
1523 1552 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1524 1553
1525 1554
1526 1555 @command(b'debugignore', [], b'[FILE]')
1527 1556 def debugignore(ui, repo, *files, **opts):
1528 1557 """display the combined ignore pattern and information about ignored files
1529 1558
1530 1559 With no argument display the combined ignore pattern.
1531 1560
1532 1561 Given space separated file names, shows if the given file is ignored and
1533 1562 if so, show the ignore rule (file and line number) that matched it.
1534 1563 """
1535 1564 ignore = repo.dirstate._ignore
1536 1565 if not files:
1537 1566 # Show all the patterns
1538 1567 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1539 1568 else:
1540 1569 m = scmutil.match(repo[None], pats=files)
1541 1570 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1542 1571 for f in m.files():
1543 1572 nf = util.normpath(f)
1544 1573 ignored = None
1545 1574 ignoredata = None
1546 1575 if nf != b'.':
1547 1576 if ignore(nf):
1548 1577 ignored = nf
1549 1578 ignoredata = repo.dirstate._ignorefileandline(nf)
1550 1579 else:
1551 1580 for p in pathutil.finddirs(nf):
1552 1581 if ignore(p):
1553 1582 ignored = p
1554 1583 ignoredata = repo.dirstate._ignorefileandline(p)
1555 1584 break
1556 1585 if ignored:
1557 1586 if ignored == nf:
1558 1587 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1559 1588 else:
1560 1589 ui.write(
1561 1590 _(
1562 1591 b"%s is ignored because of "
1563 1592 b"containing directory %s\n"
1564 1593 )
1565 1594 % (uipathfn(f), ignored)
1566 1595 )
1567 1596 ignorefile, lineno, line = ignoredata
1568 1597 ui.write(
1569 1598 _(b"(ignore rule in %s, line %d: '%s')\n")
1570 1599 % (ignorefile, lineno, line)
1571 1600 )
1572 1601 else:
1573 1602 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1574 1603
1575 1604
1576 1605 @command(
1577 1606 b'debugindex',
1578 1607 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1579 1608 _(b'-c|-m|FILE'),
1580 1609 )
1581 1610 def debugindex(ui, repo, file_=None, **opts):
1582 1611 """dump index data for a storage primitive"""
1583 1612 opts = pycompat.byteskwargs(opts)
1584 1613 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1585 1614
1586 1615 if ui.debugflag:
1587 1616 shortfn = hex
1588 1617 else:
1589 1618 shortfn = short
1590 1619
1591 1620 idlen = 12
1592 1621 for i in store:
1593 1622 idlen = len(shortfn(store.node(i)))
1594 1623 break
1595 1624
1596 1625 fm = ui.formatter(b'debugindex', opts)
1597 1626 fm.plain(
1598 1627 b' rev linkrev %s %s p2\n'
1599 1628 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1600 1629 )
1601 1630
1602 1631 for rev in store:
1603 1632 node = store.node(rev)
1604 1633 parents = store.parents(node)
1605 1634
1606 1635 fm.startitem()
1607 1636 fm.write(b'rev', b'%6d ', rev)
1608 1637 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1609 1638 fm.write(b'node', b'%s ', shortfn(node))
1610 1639 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1611 1640 fm.write(b'p2', b'%s', shortfn(parents[1]))
1612 1641 fm.plain(b'\n')
1613 1642
1614 1643 fm.end()
1615 1644
1616 1645
1617 1646 @command(
1618 1647 b'debugindexdot',
1619 1648 cmdutil.debugrevlogopts,
1620 1649 _(b'-c|-m|FILE'),
1621 1650 optionalrepo=True,
1622 1651 )
1623 1652 def debugindexdot(ui, repo, file_=None, **opts):
1624 1653 """dump an index DAG as a graphviz dot file"""
1625 1654 opts = pycompat.byteskwargs(opts)
1626 1655 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1627 1656 ui.writenoi18n(b"digraph G {\n")
1628 1657 for i in r:
1629 1658 node = r.node(i)
1630 1659 pp = r.parents(node)
1631 1660 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1632 1661 if pp[1] != nullid:
1633 1662 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1634 1663 ui.write(b"}\n")
1635 1664
1636 1665
1637 1666 @command(b'debugindexstats', [])
1638 1667 def debugindexstats(ui, repo):
1639 1668 """show stats related to the changelog index"""
1640 1669 repo.changelog.shortest(nullid, 1)
1641 1670 index = repo.changelog.index
1642 1671 if not util.safehasattr(index, b'stats'):
1643 1672 raise error.Abort(_(b'debugindexstats only works with native code'))
1644 1673 for k, v in sorted(index.stats().items()):
1645 1674 ui.write(b'%s: %d\n' % (k, v))
1646 1675
1647 1676
1648 1677 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1649 1678 def debuginstall(ui, **opts):
1650 1679 """test Mercurial installation
1651 1680
1652 1681 Returns 0 on success.
1653 1682 """
1654 1683 opts = pycompat.byteskwargs(opts)
1655 1684
1656 1685 problems = 0
1657 1686
1658 1687 fm = ui.formatter(b'debuginstall', opts)
1659 1688 fm.startitem()
1660 1689
1661 1690 # encoding might be unknown or wrong. don't translate these messages.
1662 1691 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1663 1692 err = None
1664 1693 try:
1665 1694 codecs.lookup(pycompat.sysstr(encoding.encoding))
1666 1695 except LookupError as inst:
1667 1696 err = stringutil.forcebytestr(inst)
1668 1697 problems += 1
1669 1698 fm.condwrite(
1670 1699 err,
1671 1700 b'encodingerror',
1672 1701 b" %s\n (check that your locale is properly set)\n",
1673 1702 err,
1674 1703 )
1675 1704
1676 1705 # Python
1677 1706 pythonlib = None
1678 1707 if util.safehasattr(os, '__file__'):
1679 1708 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1680 1709 elif getattr(sys, 'oxidized', False):
1681 1710 pythonlib = pycompat.sysexecutable
1682 1711
1683 1712 fm.write(
1684 1713 b'pythonexe',
1685 1714 _(b"checking Python executable (%s)\n"),
1686 1715 pycompat.sysexecutable or _(b"unknown"),
1687 1716 )
1688 1717 fm.write(
1689 1718 b'pythonimplementation',
1690 1719 _(b"checking Python implementation (%s)\n"),
1691 1720 pycompat.sysbytes(platform.python_implementation()),
1692 1721 )
1693 1722 fm.write(
1694 1723 b'pythonver',
1695 1724 _(b"checking Python version (%s)\n"),
1696 1725 (b"%d.%d.%d" % sys.version_info[:3]),
1697 1726 )
1698 1727 fm.write(
1699 1728 b'pythonlib',
1700 1729 _(b"checking Python lib (%s)...\n"),
1701 1730 pythonlib or _(b"unknown"),
1702 1731 )
1703 1732
1704 1733 try:
1705 1734 from . import rustext # pytype: disable=import-error
1706 1735
1707 1736 rustext.__doc__ # trigger lazy import
1708 1737 except ImportError:
1709 1738 rustext = None
1710 1739
1711 1740 security = set(sslutil.supportedprotocols)
1712 1741 if sslutil.hassni:
1713 1742 security.add(b'sni')
1714 1743
1715 1744 fm.write(
1716 1745 b'pythonsecurity',
1717 1746 _(b"checking Python security support (%s)\n"),
1718 1747 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1719 1748 )
1720 1749
1721 1750 # These are warnings, not errors. So don't increment problem count. This
1722 1751 # may change in the future.
1723 1752 if b'tls1.2' not in security:
1724 1753 fm.plain(
1725 1754 _(
1726 1755 b' TLS 1.2 not supported by Python install; '
1727 1756 b'network connections lack modern security\n'
1728 1757 )
1729 1758 )
1730 1759 if b'sni' not in security:
1731 1760 fm.plain(
1732 1761 _(
1733 1762 b' SNI not supported by Python install; may have '
1734 1763 b'connectivity issues with some servers\n'
1735 1764 )
1736 1765 )
1737 1766
1738 1767 fm.plain(
1739 1768 _(
1740 1769 b"checking Rust extensions (%s)\n"
1741 1770 % (b'missing' if rustext is None else b'installed')
1742 1771 ),
1743 1772 )
1744 1773
1745 1774 # TODO print CA cert info
1746 1775
1747 1776 # hg version
1748 1777 hgver = util.version()
1749 1778 fm.write(
1750 1779 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1751 1780 )
1752 1781 fm.write(
1753 1782 b'hgverextra',
1754 1783 _(b"checking Mercurial custom build (%s)\n"),
1755 1784 b'+'.join(hgver.split(b'+')[1:]),
1756 1785 )
1757 1786
1758 1787 # compiled modules
1759 1788 hgmodules = None
1760 1789 if util.safehasattr(sys.modules[__name__], '__file__'):
1761 1790 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1762 1791 elif getattr(sys, 'oxidized', False):
1763 1792 hgmodules = pycompat.sysexecutable
1764 1793
1765 1794 fm.write(
1766 1795 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1767 1796 )
1768 1797 fm.write(
1769 1798 b'hgmodules',
1770 1799 _(b"checking installed modules (%s)...\n"),
1771 1800 hgmodules or _(b"unknown"),
1772 1801 )
1773 1802
1774 1803 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1775 1804 rustext = rustandc # for now, that's the only case
1776 1805 cext = policy.policy in (b'c', b'allow') or rustandc
1777 1806 nopure = cext or rustext
1778 1807 if nopure:
1779 1808 err = None
1780 1809 try:
1781 1810 if cext:
1782 1811 from .cext import ( # pytype: disable=import-error
1783 1812 base85,
1784 1813 bdiff,
1785 1814 mpatch,
1786 1815 osutil,
1787 1816 )
1788 1817
1789 1818 # quiet pyflakes
1790 1819 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1791 1820 if rustext:
1792 1821 from .rustext import ( # pytype: disable=import-error
1793 1822 ancestor,
1794 1823 dirstate,
1795 1824 )
1796 1825
1797 1826 dir(ancestor), dir(dirstate) # quiet pyflakes
1798 1827 except Exception as inst:
1799 1828 err = stringutil.forcebytestr(inst)
1800 1829 problems += 1
1801 1830 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1802 1831
1803 1832 compengines = util.compengines._engines.values()
1804 1833 fm.write(
1805 1834 b'compengines',
1806 1835 _(b'checking registered compression engines (%s)\n'),
1807 1836 fm.formatlist(
1808 1837 sorted(e.name() for e in compengines),
1809 1838 name=b'compengine',
1810 1839 fmt=b'%s',
1811 1840 sep=b', ',
1812 1841 ),
1813 1842 )
1814 1843 fm.write(
1815 1844 b'compenginesavail',
1816 1845 _(b'checking available compression engines (%s)\n'),
1817 1846 fm.formatlist(
1818 1847 sorted(e.name() for e in compengines if e.available()),
1819 1848 name=b'compengine',
1820 1849 fmt=b'%s',
1821 1850 sep=b', ',
1822 1851 ),
1823 1852 )
1824 1853 wirecompengines = compression.compengines.supportedwireengines(
1825 1854 compression.SERVERROLE
1826 1855 )
1827 1856 fm.write(
1828 1857 b'compenginesserver',
1829 1858 _(
1830 1859 b'checking available compression engines '
1831 1860 b'for wire protocol (%s)\n'
1832 1861 ),
1833 1862 fm.formatlist(
1834 1863 [e.name() for e in wirecompengines if e.wireprotosupport()],
1835 1864 name=b'compengine',
1836 1865 fmt=b'%s',
1837 1866 sep=b', ',
1838 1867 ),
1839 1868 )
1840 1869 re2 = b'missing'
1841 1870 if util._re2:
1842 1871 re2 = b'available'
1843 1872 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1844 1873 fm.data(re2=bool(util._re2))
1845 1874
1846 1875 # templates
1847 1876 p = templater.templatedir()
1848 1877 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1849 1878 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1850 1879 if p:
1851 1880 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1852 1881 if m:
1853 1882 # template found, check if it is working
1854 1883 err = None
1855 1884 try:
1856 1885 templater.templater.frommapfile(m)
1857 1886 except Exception as inst:
1858 1887 err = stringutil.forcebytestr(inst)
1859 1888 p = None
1860 1889 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1861 1890 else:
1862 1891 p = None
1863 1892 fm.condwrite(
1864 1893 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1865 1894 )
1866 1895 fm.condwrite(
1867 1896 not m,
1868 1897 b'defaulttemplatenotfound',
1869 1898 _(b" template '%s' not found\n"),
1870 1899 b"default",
1871 1900 )
1872 1901 if not p:
1873 1902 problems += 1
1874 1903 fm.condwrite(
1875 1904 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1876 1905 )
1877 1906
1878 1907 # editor
1879 1908 editor = ui.geteditor()
1880 1909 editor = util.expandpath(editor)
1881 1910 editorbin = procutil.shellsplit(editor)[0]
1882 1911 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1883 1912 cmdpath = procutil.findexe(editorbin)
1884 1913 fm.condwrite(
1885 1914 not cmdpath and editor == b'vi',
1886 1915 b'vinotfound',
1887 1916 _(
1888 1917 b" No commit editor set and can't find %s in PATH\n"
1889 1918 b" (specify a commit editor in your configuration"
1890 1919 b" file)\n"
1891 1920 ),
1892 1921 not cmdpath and editor == b'vi' and editorbin,
1893 1922 )
1894 1923 fm.condwrite(
1895 1924 not cmdpath and editor != b'vi',
1896 1925 b'editornotfound',
1897 1926 _(
1898 1927 b" Can't find editor '%s' in PATH\n"
1899 1928 b" (specify a commit editor in your configuration"
1900 1929 b" file)\n"
1901 1930 ),
1902 1931 not cmdpath and editorbin,
1903 1932 )
1904 1933 if not cmdpath and editor != b'vi':
1905 1934 problems += 1
1906 1935
1907 1936 # check username
1908 1937 username = None
1909 1938 err = None
1910 1939 try:
1911 1940 username = ui.username()
1912 1941 except error.Abort as e:
1913 1942 err = e.message
1914 1943 problems += 1
1915 1944
1916 1945 fm.condwrite(
1917 1946 username, b'username', _(b"checking username (%s)\n"), username
1918 1947 )
1919 1948 fm.condwrite(
1920 1949 err,
1921 1950 b'usernameerror',
1922 1951 _(
1923 1952 b"checking username...\n %s\n"
1924 1953 b" (specify a username in your configuration file)\n"
1925 1954 ),
1926 1955 err,
1927 1956 )
1928 1957
1929 1958 for name, mod in extensions.extensions():
1930 1959 handler = getattr(mod, 'debuginstall', None)
1931 1960 if handler is not None:
1932 1961 problems += handler(ui, fm)
1933 1962
1934 1963 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1935 1964 if not problems:
1936 1965 fm.data(problems=problems)
1937 1966 fm.condwrite(
1938 1967 problems,
1939 1968 b'problems',
1940 1969 _(b"%d problems detected, please check your install!\n"),
1941 1970 problems,
1942 1971 )
1943 1972 fm.end()
1944 1973
1945 1974 return problems
1946 1975
1947 1976
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    # The peer may hold a live connection (ssh/http); close it even when
    # the capability check aborts, matching debugpeer/debugpushkey.
    peer = hg.peer(ui, opts, repopath)
    try:
        if not peer.capable(b'known'):
            raise error.Abort(b"known() not supported by target repository")
        flags = peer.known([bin(s) for s in ids])
        ui.write(b"%s\n" % (b"".join([b"1" if f else b"0" for f in flags])))
    finally:
        peer.close()
1961 1990
1962 1991
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Pure alias kept for old completion scripts; all the actual work is
    # done by debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
1967 1996
1968 1997
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-free mode: blindly delete the lock file(s). No check is made
    # that the holder is gone -- hence the DANGEROUS label on the options.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # Set mode: acquire the requested lock(s) non-blockingly and hold them
    # until the prompt is answered (or the process is interrupted); the
    # finally clause guarantees release either way.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Report mode (default): probe each lock and describe its holder.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Returns 1 if the lock is held by someone else, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We acquired it, so nobody else holds it; release immediately.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the probe and
                # the stat: treat it as free.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2080 2109
2081 2110
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache from the manifest storage; not every
        # revlog implementation provides one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # No action requested: display the current cache content.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2155 2184
2156 2185
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # With --verbose, first report which on-disk record format (v1/v2)
        # will be used, before rendering the state itself.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default template rendering the full merge state: the two merged
        # commits, per-file resolution details, and any extras.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            # The record layout depends on the state kind: content merges
            # carry hash/path/flags fields, path conflicts carry rename info.
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2264 2293
2265 2294
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect every known name from each namespace except branches, which
    # get special treatment below (historically only *open* branches were
    # listed).
    candidates = set()
    for namespace, ns in pycompat.iteritems(repo.names):
        if namespace == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # No argument means "complete everything" (the empty prefix).
    prefixes = args or [b'']
    matches = {
        name
        for name in candidates
        for prefix in prefixes
        if name.startswith(prefix)
    }
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2288 2317
2289 2318
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        # Regenerate the binary nodemap from the changelog index and dump
        # it on stdout (no on-disk data needed).
        unfi = repo.unfiltered()
        cl = unfi.changelog
        if util.safehasattr(cl.index, "nodemap_data_all"):
            # rust/c index can serialize itself directly
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk'] or opts['check'] or opts['metadata']:
        # The remaining modes all start from the nodemap persisted on
        # disk, so load it once here instead of in every branch.
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is None:
            # nothing persisted on disk: nothing to dump, check or show
            return
        docket, data = nm_data
        if opts['dump_disk']:
            ui.write(data[:])
        elif opts['check']:
            return nodemap.check_data(ui, cl.index, data)
        else:  # metadata
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2351 2380
2352 2381
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into its binary form, aborting with a
        # user-facing error on malformed input.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Deletion mode: remove the markers at the given indices.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a new marker precursor -> successors
        # inside its own lock + transaction.
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            # Indices are positions in the *full* marker list, so iterate
            # over everything but only display the selected subset.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2502 2531
2503 2532
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # Resolve the revision (default: working directory) and print every
    # recorded copy against the first parent.
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for destination in copymap:
        ui.write(b'%s -> %s\n' % (copymap[destination], destination))
2516 2545
2517 2546
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # This function was previously (mis)named debugp1copies, which made it
    # shadow the real debugp1copies at module level. The registered command
    # name was always b'debugp2copies', so user-visible behavior is
    # unchanged by the rename.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2530 2559
2531 2560
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, keeping only
        # dirstate entries whose status letter is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # outside the repository: nothing to complete
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # strip the repo root prefix; dirstate entries are repo-relative
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate status letters from the options;
    # with no filter options, everything (b'nmar') is acceptable.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2600 2629
2601 2630
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then list every copy selected by the matcher
    # in sorted order.
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    found_copies = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for dst, src in sorted(found_copies.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2615 2644
2616 2645
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is always turned on here; the output only shows
    # up when --debug is in effect.
    logging_override = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(logging_override):
        peer = hg.peer(ui, {}, path)
        try:
            is_local = peer.local() is not None
            pushable = peer.canpush()
            yesno = lambda flag: _(b'yes') if flag else _(b'no')

            ui.write(_(b'url: %s\n') % peer.url())
            ui.write(_(b'local: %s\n') % yesno(is_local))
            ui.write(_(b'pushable: %s\n') % yesno(pushable))
        finally:
            # always release the peer connection
            peer.close()
2640 2669
2641 2670
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is applied as a config override so _picktool sees it the same
    # way a real merge would.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                if not ui.debugflag:
                    # suppress _picktool's own output unless --debug is set
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2730 2759
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            listing = peer.listkeys(namespace)
            for key in sorted(listing):
                ui.write(
                    b"%s\t%s\n"
                    % (
                        stringutil.escapestr(key),
                        stringutil.escapestr(listing[key]),
                    )
                )
            return

        # Update mode: conditionally set `key` to `new` if it is `old`.
        key, old, new = keyinfo
        request = {
            b'namespace': namespace,
            b'key': key,
            b'old': old,
            b'new': new,
        }
        with peer.commandexecutor() as executor:
            r = executor.callcommand(b'pushkey', request).result()

        ui.status(pycompat.bytestr(r) + b'\n')
        return not r
    finally:
        peer.close()
2766 2795
2767 2796
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display pvec information and the relation between two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # None of the pvec comparisons matched; report an unknown relation
        # rather than crashing on an unbound `rel` below.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2794 2823
2795 2824
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None means "rebuild everything"
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            # b'a' is the dirstate status for "added"; added files keep
            # their status and are excluded from the rebuild
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2844 2873
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Delegates entirely to repair.rebuildfncache(), which regenerates
    # the fncache from the store contents.
    repair.rebuildfncache(ui, repo)
2849 2878
2850 2879
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    # For every matched file, report the (source file, source node) its
    # filelog records, or that it was not renamed.
    for abs in ctx.walk(matcher):
        fctx = ctx[abs]
        renamed = fctx.filelog().renamed(fctx.filenode())
        path = repo.pathto(abs)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % path)
        else:
            src, srcnode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n") % (path, src, hex(srcnode))
            )
2870 2899
2871 2900
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """ print the current repo requirements """
    # One requirement per line, sorted so output is deterministic.
    lines = [b"%s\n" % req for req in sorted(repo.requirements)]
    ui.write(b''.join(lines))
2877 2906
2878 2907
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    By default, print aggregate statistics about the revlog: delta kinds,
    snapshot depths, chain lengths and compression ratio. With -d/--dump,
    print one raw index line per revision instead.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # Dump mode: one tabular line per revision, then return early.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0  # running total of raw (uncompressed) sizes
        heads = set()  # current head revisions seen so far

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # full revision: its own delta base
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # parents of this rev stop being heads; this rev becomes one
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # ratio of raw data seen so far vs stored bytes so far
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Statistics mode: decode version/flags from the revlog header word.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into the 3-element accumulator `l`: [min, max, total].
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # stored as a full snapshot (no delta base)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # stored as a delta: extend its base's chain data
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # plain delta: classify against prev/p1/p2
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # first byte of a chunk identifies its compression engine
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # turn the "total" slot (index 2) of each accumulator into an average
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # format-string templates: width is filled in per value below
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # decimal format sized to fit `max`
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # "value (percent)" format sized to fit `max`
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for the pcfmtstr templates
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # label a compression-chunk type; printable ASCII letters are
        # shown both as hex and literally
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3233 3262
3234 3263
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    -f/--format selects one of two column layouts (0 or 1); --verbose adds
    offset/length columns and --debug prints full instead of short hashes.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows the full 40-char hash, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # width of a rendered node id, taken from the first revision
        idlen = len(shortfn(r.node(i)))
        break

    # header line, depending on format and verbosity
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # be resilient to broken/odd storage: fall back to null
                # parents rather than aborting the dump
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3348 3377
3349 3378
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # (stage name, transform) pairs, applied in order to the parse tree
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # drop the final 'optimized' stage
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # stages printed unconditionally vs only when the tree changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the pipeline, keeping every intermediate tree by stage name
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # evaluate both the analyzed and optimized trees and diff the
        # resulting revision sequences
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # normal path: evaluate the final tree and print the resulting revs
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3481 3510
3482 3511
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # file-like object the server logs its I/O to; stays None when no
    # logging destination was requested
    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # serve_forever() blocks until the client disconnects
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3531 3560
3532 3561
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # the second parent defaults to the null revision when omitted
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3560 3589
3561 3590
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the positional argument is the revision itself,
    # not a file; shuffle the arguments accordingly.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # error messages previously said b'debugdata' (copy-paste
            # from that command); report the actual command name
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # unwrap filelog-style wrappers down to the underlying revlog
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # sort entries by key for stable output
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3588 3617
3589 3618
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() is deprecated since Python 3.7 and removed in
    # 3.12; build the equivalent client context explicitly. Verification
    # is intentionally disabled (as the old CERT_NONE/ca_certs=None call
    # did): the goal here is only to fetch the peer certificate so the
    # Windows APIs can (re)build its chain.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # binary (DER) form of the peer certificate
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # first probe without building; only build on incompleteness
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3659 3688
3660 3689
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # all strip-backup bundles, newest first
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # show up to `limit` changesets from chlist, honoring the
        # --newest-first and --no-merges log options
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [True for p in other.changelog.parents(n) if p != nullid]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
        source, branches = hg.parseurl(source, opts.get(b"branch"))
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # silence getremotechanges' chatter; restore verbosity afterwards
        quiet = ui.quiet
        try:
            ui.quiet = True
            other, chlist, cleanupfn = bundlerepo.getremotechanges(
                ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
            )
        except error.LookupError:
            continue
        finally:
            ui.quiet = quiet

        try:
            if not chlist:
                continue
            if recovernode:
                # recovery mode: apply the first bundle containing the
                # requested node, then stop scanning
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        break
            else:
                # listing mode: print the bundle timestamp, then its
                # changesets (or the bundle path with --verbose)
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3796 3825
3797 3826
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state recorded by the given (or working)
    # revision: one entry per subrepo with its path, source URL and the
    # pinned revision, sorted by path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3810 3839
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    # Imported lazily: the interpreter is only needed when this debug
    # command actually runs.
    import code

    code.interact(local={'ui': ui, 'repo': repo})
3826 3855
3827 3856
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() invocations so repeated subgraph walks
    # are not recomputed for every requested revision.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                # One indented line per successors set, nodes separated
                # by single spaces.
                ui.write(b' ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
3882 3911
3883 3912
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    fnodescache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    hgtagslog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = fnodescache.getfnode(node, computemissing=False)
        # None means no cache entry; any other falsy value is a corrupt
        # (e.g. empty) record.
        if fnode is None:
            display = b'missing'
        elif not fnode:
            display = b'invalid'
        else:
            display = hex(fnode)
            if not hgtagslog.hasnode(fnode):
                display += b' (unknown node)'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3902 3931
3903 3932
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # Revisions only make sense with a repository present.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE definitions; 'ui' is reserved and empty keys
    # are rejected.
    props = {}
    for definition in opts['define']:
        try:
            key, val = (part.strip() for part in definition.split(b'=', 1))
            if not key or key == b'ui':
                raise ValueError
            props[key] = val
        except ValueError:
            raise error.Abort(
                _(b'malformed keyword definition: %s') % definition
            )

    if ui.verbose:
        # Show the parse tree and, when aliases changed it, the expanded one.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        expanded = templater.expandaliases(tree, aliases)
        if expanded != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(expanded), b'\n'
            )

    if revs is None:
        # Generic template: render once with default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
3967 3996
3968 3997
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    if response is None:
        # getpass() can yield None (e.g. EOF); show a marker instead.
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
3983 4012
3984 4013
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the ui prompt machinery returned, for testing it.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
3997 4026
3998 4027
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Both the working-copy lock and the store lock are taken so cache
    # files can be rewritten safely while nothing else mutates the repo.
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)
4004 4033
4005 4034
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All the heavy lifting lives in the upgrade module; the repeatable
    # --optimize values are deduplicated into a set before being handed over.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4055 4084
4056 4085
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    # Fixes vs. previous version: no shadowing of the builtin ``abs``,
    # no lambda assignments (PEP 8 E731), the dated ``cond and a or b``
    # idiom replaced by a conditional expression, and no throwaway lists
    # built inside max().
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # Normalize separators for display when ui.slash is set on a platform
    # whose native separator is not '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':

        def fmtpath(fn):
            return util.normpath(fn)

    else:

        def fmtpath(fn):
            return fn

    # Pad the repo-relative and cwd-relative columns to their widest entry.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fn) for fn in items),
        max(len(repo.pathto(fn)) for fn in items),
    )
    for fn in items:
        line = fmt % (
            fn,
            fmtpath(repo.pathto(fn)),
            b'exact' if m.exact(fn) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4083 4112
4084 4113
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Divergent nodes, when present, are rendered as a trailing-space
        # separated prefix of "<hex> (<phase>)" items.
        dnodes = b''
        if entry.get(b'divergentnodes'):
            pieces = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                for ctx in entry[b'divergentnodes']
            ]
            dnodes = b' '.join(pieces) + b' '
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4102 4131
4103 4132
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the "debugwireargs" wire command against a remote peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # The remote options were consumed by hg.peer(); strip them so only
        # genuine command arguments remain.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Keep only set options, with native-string keys.
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4134 4163
4135 4164
4136 4165 def _parsewirelangblocks(fh):
4137 4166 activeaction = None
4138 4167 blocklines = []
4139 4168 lastindent = 0
4140 4169
4141 4170 for line in fh:
4142 4171 line = line.rstrip()
4143 4172 if not line:
4144 4173 continue
4145 4174
4146 4175 if line.startswith(b'#'):
4147 4176 continue
4148 4177
4149 4178 if not line.startswith(b' '):
4150 4179 # New block. Flush previous one.
4151 4180 if activeaction:
4152 4181 yield activeaction, blocklines
4153 4182
4154 4183 activeaction = line
4155 4184 blocklines = []
4156 4185 lastindent = 0
4157 4186 continue
4158 4187
4159 4188 # Else we start with an indent.
4160 4189
4161 4190 if not activeaction:
4162 4191 raise error.Abort(_(b'indented line outside of block'))
4163 4192
4164 4193 indent = len(line) - len(line.lstrip())
4165 4194
4166 4195 # If this line is indented more than the last line, concatenate it.
4167 4196 if indent > lastindent and blocklines:
4168 4197 blocklines[-1] += line.lstrip()
4169 4198 else:
4170 4199 blocklines.append(line)
4171 4200 lastindent = indent
4172 4201
4173 4202 # Flush last block.
4174 4203 if activeaction:
4175 4204 yield activeaction, blocklines
4176 4205
4177 4206
4178 4207 @command(
4179 4208 b'debugwireproto',
4180 4209 [
4181 4210 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4182 4211 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4183 4212 (
4184 4213 b'',
4185 4214 b'noreadstderr',
4186 4215 False,
4187 4216 _(b'do not read from stderr of the remote'),
4188 4217 ),
4189 4218 (
4190 4219 b'',
4191 4220 b'nologhandshake',
4192 4221 False,
4193 4222 _(b'do not log I/O related to the peer handshake'),
4194 4223 ),
4195 4224 ]
4196 4225 + cmdutil.remoteopts,
4197 4226 _(b'[PATH]'),
4198 4227 optionalrepo=True,
4199 4228 )
4200 4229 def debugwireproto(ui, repo, path=None, **opts):
4201 4230 """send wire protocol commands to a server
4202 4231
4203 4232 This command can be used to issue wire protocol commands to remote
4204 4233 peers and to debug the raw data being exchanged.
4205 4234
4206 4235 ``--localssh`` will start an SSH server against the current repository
4207 4236 and connect to that. By default, the connection will perform a handshake
4208 4237 and establish an appropriate peer instance.
4209 4238
4210 4239 ``--peer`` can be used to bypass the handshake protocol and construct a
4211 4240 peer instance using the specified class type. Valid values are ``raw``,
4212 4241 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4213 4242 raw data payloads and don't support higher-level command actions.
4214 4243
4215 4244 ``--noreadstderr`` can be used to disable automatic reading from stderr
4216 4245 of the peer (for SSH connections only). Disabling automatic reading of
4217 4246 stderr is useful for making output more deterministic.
4218 4247
4219 4248 Commands are issued via a mini language which is specified via stdin.
4220 4249 The language consists of individual actions to perform. An action is
4221 4250 defined by a block. A block is defined as a line with no leading
4222 4251 space followed by 0 or more lines with leading space. Blocks are
4223 4252 effectively a high-level command with additional metadata.
4224 4253
4225 4254 Lines beginning with ``#`` are ignored.
4226 4255
4227 4256 The following sections denote available actions.
4228 4257
4229 4258 raw
4230 4259 ---
4231 4260
4232 4261 Send raw data to the server.
4233 4262
4234 4263 The block payload contains the raw data to send as one atomic send
4235 4264 operation. The data may not actually be delivered in a single system
4236 4265 call: it depends on the abilities of the transport being used.
4237 4266
4238 4267 Each line in the block is de-indented and concatenated. Then, that
4239 4268 value is evaluated as a Python b'' literal. This allows the use of
4240 4269 backslash escaping, etc.
4241 4270
4242 4271 raw+
4243 4272 ----
4244 4273
4245 4274 Behaves like ``raw`` except flushes output afterwards.
4246 4275
4247 4276 command <X>
4248 4277 -----------
4249 4278
4250 4279 Send a request to run a named command, whose name follows the ``command``
4251 4280 string.
4252 4281
4253 4282 Arguments to the command are defined as lines in this block. The format of
4254 4283 each line is ``<key> <value>``. e.g.::
4255 4284
4256 4285 command listkeys
4257 4286 namespace bookmarks
4258 4287
4259 4288 If the value begins with ``eval:``, it will be interpreted as a Python
4260 4289 literal expression. Otherwise values are interpreted as Python b'' literals.
4261 4290 This allows sending complex types and encoding special byte sequences via
4262 4291 backslash escaping.
4263 4292
4264 4293 The following arguments have special meaning:
4265 4294
4266 4295 ``PUSHFILE``
4267 4296 When defined, the *push* mechanism of the peer will be used instead
4268 4297 of the static request-response mechanism and the content of the
4269 4298 file specified in the value of this argument will be sent as the
4270 4299 command payload.
4271 4300
4272 4301 This can be used to submit a local bundle file to the remote.
4273 4302
4274 4303 batchbegin
4275 4304 ----------
4276 4305
4277 4306 Instruct the peer to begin a batched send.
4278 4307
4279 4308 All ``command`` blocks are queued for execution until the next
4280 4309 ``batchsubmit`` block.
4281 4310
4282 4311 batchsubmit
4283 4312 -----------
4284 4313
4285 4314 Submit previously queued ``command`` blocks as a batch request.
4286 4315
4287 4316 This action MUST be paired with a ``batchbegin`` action.
4288 4317
4289 4318 httprequest <method> <path>
4290 4319 ---------------------------
4291 4320
4292 4321 (HTTP peer only)
4293 4322
4294 4323 Send an HTTP request to the peer.
4295 4324
4296 4325 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4297 4326
4298 4327 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4299 4328 headers to add to the request. e.g. ``Accept: foo``.
4300 4329
4301 4330 The following arguments are special:
4302 4331
4303 4332 ``BODYFILE``
4304 4333 The content of the file defined as the value to this argument will be
4305 4334 transferred verbatim as the HTTP request body.
4306 4335
4307 4336 ``frame <type> <flags> <payload>``
4308 4337 Send a unified protocol frame as part of the request body.
4309 4338
4310 4339 All frames will be collected and sent as the body to the HTTP
4311 4340 request.
4312 4341
4313 4342 close
4314 4343 -----
4315 4344
4316 4345 Close the connection to the server.
4317 4346
4318 4347 flush
4319 4348 -----
4320 4349
4321 4350 Flush data written to the server.
4322 4351
4323 4352 readavailable
4324 4353 -------------
4325 4354
4326 4355 Close the write end of the connection and read all available data from
4327 4356 the server.
4328 4357
4329 4358 If the connection to the server encompasses multiple pipes, we poll both
4330 4359 pipes and read available data.
4331 4360
4332 4361 readline
4333 4362 --------
4334 4363
4335 4364 Read a line of output from the server. If there are multiple output
4336 4365 pipes, reads only the main pipe.
4337 4366
4338 4367 ereadline
4339 4368 ---------
4340 4369
4341 4370 Like ``readline``, but read from the stderr pipe, if available.
4342 4371
4343 4372 read <X>
4344 4373 --------
4345 4374
4346 4375 ``read()`` N bytes from the server's main output pipe.
4347 4376
4348 4377 eread <X>
4349 4378 ---------
4350 4379
4351 4380 ``read()`` N bytes from the server's stderr pipe, if available.
4352 4381
4353 4382 Specifying Unified Frame-Based Protocol Frames
4354 4383 ----------------------------------------------
4355 4384
4356 4385 It is possible to emit a *Unified Frame-Based Protocol* by using special
4357 4386 syntax.
4358 4387
4359 4388 A frame is composed as a type, flags, and payload. These can be parsed
4360 4389 from a string of the form:
4361 4390
4362 4391 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4363 4392
4364 4393 ``request-id`` and ``stream-id`` are integers defining the request and
4365 4394 stream identifiers.
4366 4395
4367 4396 ``type`` can be an integer value for the frame type or the string name
4368 4397 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4369 4398 ``command-name``.
4370 4399
4371 4400 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4372 4401 components. Each component (and there can be just one) can be an integer
4373 4402 or a flag name for stream flags or frame flags, respectively. Values are
4374 4403 resolved to integers and then bitwise OR'd together.
4375 4404
4376 4405 ``payload`` represents the raw frame payload. If it begins with
4377 4406 ``cbor:``, the following string is evaluated as Python code and the
4378 4407 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4379 4408 as a Python byte string literal.
4380 4409 """
4381 4410 opts = pycompat.byteskwargs(opts)
4382 4411
4383 4412 if opts[b'localssh'] and not repo:
4384 4413 raise error.Abort(_(b'--localssh requires a repository'))
4385 4414
4386 4415 if opts[b'peer'] and opts[b'peer'] not in (
4387 4416 b'raw',
4388 4417 b'http2',
4389 4418 b'ssh1',
4390 4419 b'ssh2',
4391 4420 ):
4392 4421 raise error.Abort(
4393 4422 _(b'invalid value for --peer'),
4394 4423 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4395 4424 )
4396 4425
4397 4426 if path and opts[b'localssh']:
4398 4427 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4399 4428
4400 4429 if ui.interactive():
4401 4430 ui.write(_(b'(waiting for commands on stdin)\n'))
4402 4431
4403 4432 blocks = list(_parsewirelangblocks(ui.fin))
4404 4433
4405 4434 proc = None
4406 4435 stdin = None
4407 4436 stdout = None
4408 4437 stderr = None
4409 4438 opener = None
4410 4439
4411 4440 if opts[b'localssh']:
4412 4441 # We start the SSH server in its own process so there is process
4413 4442 # separation. This prevents a whole class of potential bugs around
4414 4443 # shared state from interfering with server operation.
4415 4444 args = procutil.hgcmd() + [
4416 4445 b'-R',
4417 4446 repo.root,
4418 4447 b'debugserve',
4419 4448 b'--sshstdio',
4420 4449 ]
4421 4450 proc = subprocess.Popen(
4422 4451 pycompat.rapply(procutil.tonativestr, args),
4423 4452 stdin=subprocess.PIPE,
4424 4453 stdout=subprocess.PIPE,
4425 4454 stderr=subprocess.PIPE,
4426 4455 bufsize=0,
4427 4456 )
4428 4457
4429 4458 stdin = proc.stdin
4430 4459 stdout = proc.stdout
4431 4460 stderr = proc.stderr
4432 4461
4433 4462 # We turn the pipes into observers so we can log I/O.
4434 4463 if ui.verbose or opts[b'peer'] == b'raw':
4435 4464 stdin = util.makeloggingfileobject(
4436 4465 ui, proc.stdin, b'i', logdata=True
4437 4466 )
4438 4467 stdout = util.makeloggingfileobject(
4439 4468 ui, proc.stdout, b'o', logdata=True
4440 4469 )
4441 4470 stderr = util.makeloggingfileobject(
4442 4471 ui, proc.stderr, b'e', logdata=True
4443 4472 )
4444 4473
4445 4474 # --localssh also implies the peer connection settings.
4446 4475
4447 4476 url = b'ssh://localserver'
4448 4477 autoreadstderr = not opts[b'noreadstderr']
4449 4478
4450 4479 if opts[b'peer'] == b'ssh1':
4451 4480 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4452 4481 peer = sshpeer.sshv1peer(
4453 4482 ui,
4454 4483 url,
4455 4484 proc,
4456 4485 stdin,
4457 4486 stdout,
4458 4487 stderr,
4459 4488 None,
4460 4489 autoreadstderr=autoreadstderr,
4461 4490 )
4462 4491 elif opts[b'peer'] == b'ssh2':
4463 4492 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4464 4493 peer = sshpeer.sshv2peer(
4465 4494 ui,
4466 4495 url,
4467 4496 proc,
4468 4497 stdin,
4469 4498 stdout,
4470 4499 stderr,
4471 4500 None,
4472 4501 autoreadstderr=autoreadstderr,
4473 4502 )
4474 4503 elif opts[b'peer'] == b'raw':
4475 4504 ui.write(_(b'using raw connection to peer\n'))
4476 4505 peer = None
4477 4506 else:
4478 4507 ui.write(_(b'creating ssh peer from handshake results\n'))
4479 4508 peer = sshpeer.makepeer(
4480 4509 ui,
4481 4510 url,
4482 4511 proc,
4483 4512 stdin,
4484 4513 stdout,
4485 4514 stderr,
4486 4515 autoreadstderr=autoreadstderr,
4487 4516 )
4488 4517
4489 4518 elif path:
4490 4519 # We bypass hg.peer() so we can proxy the sockets.
4491 4520 # TODO consider not doing this because we skip
4492 4521 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4493 4522 u = util.url(path)
4494 4523 if u.scheme != b'http':
4495 4524 raise error.Abort(_(b'only http:// paths are currently supported'))
4496 4525
4497 4526 url, authinfo = u.authinfo()
4498 4527 openerargs = {
4499 4528 'useragent': b'Mercurial debugwireproto',
4500 4529 }
4501 4530
4502 4531 # Turn pipes/sockets into observers so we can log I/O.
4503 4532 if ui.verbose:
4504 4533 openerargs.update(
4505 4534 {
4506 4535 'loggingfh': ui,
4507 4536 'loggingname': b's',
4508 4537 'loggingopts': {
4509 4538 'logdata': True,
4510 4539 'logdataapis': False,
4511 4540 },
4512 4541 }
4513 4542 )
4514 4543
4515 4544 if ui.debugflag:
4516 4545 openerargs['loggingopts']['logdataapis'] = True
4517 4546
4518 4547 # Don't send default headers when in raw mode. This allows us to
4519 4548 # bypass most of the behavior of our URL handling code so we can
4520 4549 # have near complete control over what's sent on the wire.
4521 4550 if opts[b'peer'] == b'raw':
4522 4551 openerargs['sendaccept'] = False
4523 4552
4524 4553 opener = urlmod.opener(ui, authinfo, **openerargs)
4525 4554
4526 4555 if opts[b'peer'] == b'http2':
4527 4556 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4528 4557 # We go through makepeer() because we need an API descriptor for
4529 4558 # the peer instance to be useful.
4530 4559 with ui.configoverride(
4531 4560 {(b'experimental', b'httppeer.advertise-v2'): True}
4532 4561 ):
4533 4562 if opts[b'nologhandshake']:
4534 4563 ui.pushbuffer()
4535 4564
4536 4565 peer = httppeer.makepeer(ui, path, opener=opener)
4537 4566
4538 4567 if opts[b'nologhandshake']:
4539 4568 ui.popbuffer()
4540 4569
4541 4570 if not isinstance(peer, httppeer.httpv2peer):
4542 4571 raise error.Abort(
4543 4572 _(
4544 4573 b'could not instantiate HTTP peer for '
4545 4574 b'wire protocol version 2'
4546 4575 ),
4547 4576 hint=_(
4548 4577 b'the server may not have the feature '
4549 4578 b'enabled or is not allowing this '
4550 4579 b'client version'
4551 4580 ),
4552 4581 )
4553 4582
4554 4583 elif opts[b'peer'] == b'raw':
4555 4584 ui.write(_(b'using raw connection to peer\n'))
4556 4585 peer = None
4557 4586 elif opts[b'peer']:
4558 4587 raise error.Abort(
4559 4588 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4560 4589 )
4561 4590 else:
4562 4591 peer = httppeer.makepeer(ui, path, opener=opener)
4563 4592
4564 4593 # We /could/ populate stdin/stdout with sock.makefile()...
4565 4594 else:
4566 4595 raise error.Abort(_(b'unsupported connection configuration'))
4567 4596
4568 4597 batchedcommands = None
4569 4598
4570 4599 # Now perform actions based on the parsed wire language instructions.
4571 4600 for action, lines in blocks:
4572 4601 if action in (b'raw', b'raw+'):
4573 4602 if not stdin:
4574 4603 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4575 4604
4576 4605 # Concatenate the data together.
4577 4606 data = b''.join(l.lstrip() for l in lines)
4578 4607 data = stringutil.unescapestr(data)
4579 4608 stdin.write(data)
4580 4609
4581 4610 if action == b'raw+':
4582 4611 stdin.flush()
4583 4612 elif action == b'flush':
4584 4613 if not stdin:
4585 4614 raise error.Abort(_(b'cannot call flush on this peer'))
4586 4615 stdin.flush()
4587 4616 elif action.startswith(b'command'):
4588 4617 if not peer:
4589 4618 raise error.Abort(
4590 4619 _(
4591 4620 b'cannot send commands unless peer instance '
4592 4621 b'is available'
4593 4622 )
4594 4623 )
4595 4624
4596 4625 command = action.split(b' ', 1)[1]
4597 4626
4598 4627 args = {}
4599 4628 for line in lines:
4600 4629 # We need to allow empty values.
4601 4630 fields = line.lstrip().split(b' ', 1)
4602 4631 if len(fields) == 1:
4603 4632 key = fields[0]
4604 4633 value = b''
4605 4634 else:
4606 4635 key, value = fields
4607 4636
4608 4637 if value.startswith(b'eval:'):
4609 4638 value = stringutil.evalpythonliteral(value[5:])
4610 4639 else:
4611 4640 value = stringutil.unescapestr(value)
4612 4641
4613 4642 args[key] = value
4614 4643
4615 4644 if batchedcommands is not None:
4616 4645 batchedcommands.append((command, args))
4617 4646 continue
4618 4647
4619 4648 ui.status(_(b'sending %s command\n') % command)
4620 4649
4621 4650 if b'PUSHFILE' in args:
4622 4651 with open(args[b'PUSHFILE'], 'rb') as fh:
4623 4652 del args[b'PUSHFILE']
4624 4653 res, output = peer._callpush(
4625 4654 command, fh, **pycompat.strkwargs(args)
4626 4655 )
4627 4656 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4628 4657 ui.status(
4629 4658 _(b'remote output: %s\n') % stringutil.escapestr(output)
4630 4659 )
4631 4660 else:
4632 4661 with peer.commandexecutor() as e:
4633 4662 res = e.callcommand(command, args).result()
4634 4663
4635 4664 if isinstance(res, wireprotov2peer.commandresponse):
4636 4665 val = res.objects()
4637 4666 ui.status(
4638 4667 _(b'response: %s\n')
4639 4668 % stringutil.pprint(val, bprefix=True, indent=2)
4640 4669 )
4641 4670 else:
4642 4671 ui.status(
4643 4672 _(b'response: %s\n')
4644 4673 % stringutil.pprint(res, bprefix=True, indent=2)
4645 4674 )
4646 4675
4647 4676 elif action == b'batchbegin':
4648 4677 if batchedcommands is not None:
4649 4678 raise error.Abort(_(b'nested batchbegin not allowed'))
4650 4679
4651 4680 batchedcommands = []
4652 4681 elif action == b'batchsubmit':
4653 4682 # There is a batching API we could go through. But it would be
4654 4683 # difficult to normalize requests into function calls. It is easier
4655 4684 # to bypass this layer and normalize to commands + args.
4656 4685 ui.status(
4657 4686 _(b'sending batch with %d sub-commands\n')
4658 4687 % len(batchedcommands)
4659 4688 )
4660 4689 assert peer is not None
4661 4690 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4662 4691 ui.status(
4663 4692 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4664 4693 )
4665 4694
4666 4695 batchedcommands = None
4667 4696
4668 4697 elif action.startswith(b'httprequest '):
4669 4698 if not opener:
4670 4699 raise error.Abort(
4671 4700 _(b'cannot use httprequest without an HTTP peer')
4672 4701 )
4673 4702
4674 4703 request = action.split(b' ', 2)
4675 4704 if len(request) != 3:
4676 4705 raise error.Abort(
4677 4706 _(
4678 4707 b'invalid httprequest: expected format is '
4679 4708 b'"httprequest <method> <path>'
4680 4709 )
4681 4710 )
4682 4711
4683 4712 method, httppath = request[1:]
4684 4713 headers = {}
4685 4714 body = None
4686 4715 frames = []
4687 4716 for line in lines:
4688 4717 line = line.lstrip()
4689 4718 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4690 4719 if m:
4691 4720 # Headers need to use native strings.
4692 4721 key = pycompat.strurl(m.group(1))
4693 4722 value = pycompat.strurl(m.group(2))
4694 4723 headers[key] = value
4695 4724 continue
4696 4725
4697 4726 if line.startswith(b'BODYFILE '):
4698 4727 with open(line.split(b' ', 1), b'rb') as fh:
4699 4728 body = fh.read()
4700 4729 elif line.startswith(b'frame '):
4701 4730 frame = wireprotoframing.makeframefromhumanstring(
4702 4731 line[len(b'frame ') :]
4703 4732 )
4704 4733
4705 4734 frames.append(frame)
4706 4735 else:
4707 4736 raise error.Abort(
4708 4737 _(b'unknown argument to httprequest: %s') % line
4709 4738 )
4710 4739
4711 4740 url = path + httppath
4712 4741
4713 4742 if frames:
4714 4743 body = b''.join(bytes(f) for f in frames)
4715 4744
4716 4745 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4717 4746
4718 4747 # urllib.Request insists on using has_data() as a proxy for
4719 4748 # determining the request method. Override that to use our
4720 4749 # explicitly requested method.
4721 4750 req.get_method = lambda: pycompat.sysstr(method)
4722 4751
4723 4752 try:
4724 4753 res = opener.open(req)
4725 4754 body = res.read()
4726 4755 except util.urlerr.urlerror as e:
4727 4756 # read() method must be called, but only exists in Python 2
4728 4757 getattr(e, 'read', lambda: None)()
4729 4758 continue
4730 4759
4731 4760 ct = res.headers.get('Content-Type')
4732 4761 if ct == 'application/mercurial-cbor':
4733 4762 ui.write(
4734 4763 _(b'cbor> %s\n')
4735 4764 % stringutil.pprint(
4736 4765 cborutil.decodeall(body), bprefix=True, indent=2
4737 4766 )
4738 4767 )
4739 4768
4740 4769 elif action == b'close':
4741 4770 assert peer is not None
4742 4771 peer.close()
4743 4772 elif action == b'readavailable':
4744 4773 if not stdout or not stderr:
4745 4774 raise error.Abort(
4746 4775 _(b'readavailable not available on this peer')
4747 4776 )
4748 4777
4749 4778 stdin.close()
4750 4779 stdout.read()
4751 4780 stderr.read()
4752 4781
4753 4782 elif action == b'readline':
4754 4783 if not stdout:
4755 4784 raise error.Abort(_(b'readline not available on this peer'))
4756 4785 stdout.readline()
4757 4786 elif action == b'ereadline':
4758 4787 if not stderr:
4759 4788 raise error.Abort(_(b'ereadline not available on this peer'))
4760 4789 stderr.readline()
4761 4790 elif action.startswith(b'read '):
4762 4791 count = int(action.split(b' ', 1)[1])
4763 4792 if not stdout:
4764 4793 raise error.Abort(_(b'read not available on this peer'))
4765 4794 stdout.read(count)
4766 4795 elif action.startswith(b'eread '):
4767 4796 count = int(action.split(b' ', 1)[1])
4768 4797 if not stderr:
4769 4798 raise error.Abort(_(b'eread not available on this peer'))
4770 4799 stderr.read(count)
4771 4800 else:
4772 4801 raise error.Abort(_(b'unknown action: %s') % action)
4773 4802
4774 4803 if batchedcommands is not None:
4775 4804 raise error.Abort(_(b'unclosed "batchbegin" request'))
4776 4805
4777 4806 if peer:
4778 4807 peer.close()
4779 4808
4780 4809 if proc:
4781 4810 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now