##// END OF EJS Templates
debug: `isinstance(a, x) or isinstance(a, y)` is `isinstance(a, (x, y))`
av6 -
r51442:9dcb0084 default
parent child Browse files
Show More
@@ -1,4852 +1,4852 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 getattr,
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 bundlerepo,
42 42 changegroup,
43 43 cmdutil,
44 44 color,
45 45 context,
46 46 copies,
47 47 dagparser,
48 48 dirstateutils,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filelog,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 manifest,
63 63 mergestate as mergestatemod,
64 64 metadata,
65 65 obsolete,
66 66 obsutil,
67 67 pathutil,
68 68 phases,
69 69 policy,
70 70 pvec,
71 71 pycompat,
72 72 registrar,
73 73 repair,
74 74 repoview,
75 75 requirements,
76 76 revlog,
77 77 revset,
78 78 revsetlang,
79 79 scmutil,
80 80 setdiscovery,
81 81 simplemerge,
82 82 sshpeer,
83 83 sslutil,
84 84 streamclone,
85 85 strip,
86 86 tags as tagsmod,
87 87 templater,
88 88 treediscovery,
89 89 upgrade,
90 90 url as urlmod,
91 91 util,
92 92 verify,
93 93 vfs as vfsmod,
94 94 wireprotoframing,
95 95 wireprotoserver,
96 96 )
97 97 from .interfaces import repository
98 98 from .stabletailgraph import stabletailsort
99 99 from .utils import (
100 100 cborutil,
101 101 compression,
102 102 dateutil,
103 103 procutil,
104 104 stringutil,
105 105 urlutil,
106 106 )
107 107
108 108 from .revlogutils import (
109 109 constants as revlog_constants,
110 110 debug as revlog_debug,
111 111 deltas as deltautil,
112 112 nodemap,
113 113 rewrite,
114 114 sidedata,
115 115 )
116 116
# Convenience alias so debug commands can release locks directly.
release = lockmod.release

# Command table for all debug* commands.  Seed it with the commands the
# strip extension registers so they are also available from this module.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
122 122
123 123
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = store.lookup
    elif nargs == 2:
        # No index file: fall back to the current repository's changelog.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(anc), hex(anc)))
143 143
144 144
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout Mercurial; the original passed str
    # filenames here and to the unlink below, which does not match the
    # bytes-based vfs API used everywhere else in this file.
    eicar_name = b'eicar-test-file.com'
    with repo.cachevfs.open(eicar_name, b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(eicar_name))
160 160
161 161
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Use a context manager so the bundle file is always closed, matching
    # how debugbundle handles the file it opens (the original leaked the
    # file handle).
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
168 168
169 169
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    # Refuse to build on top of existing history unless --from-existing.
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        # at: rev number of the most recently committed node (-1 = none yet)
        at = -1
        atbranch = b'default'
        # nodeids[i] is the node committed for DAG rev i (backref targets)
        nodeids = []
        id = 0
        progress.update(id)
        # second parse pass: actually create a commit per 'n' event
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the shared file
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # append this rev's marker to the line it owns
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry the per-rev "nf*" files over from p2
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # resolve DAG parents (backrefs) to previously created nodes
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    # ':tag' elements become local tags, written outside the transaction
    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
353 353
354 354
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of a changegroup unbundler ``gen``

    With ``all`` set, every delta of every section is listed with its
    nodes, delta base and delta length; otherwise only the changelog
    node hashes are printed.  The stream is consumed in order, so the
    sections must be read exactly as they appear on the wire:
    changelog, manifest, then one filelog per file.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one line per delta of the section currently positioned
            # in the stream.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # iter(callable, {}) stops at the empty dict marking end of filelogs
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
394 394
395 395
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display the version and obsolescence markers carried by ``part``"""
    opts = pycompat.byteskwargs(opts)
    blob = part.read()
    pad = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(blob)
    except error.UnknownVersion as exc:
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (pad, exc.version, len(blob))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (pad, version, len(blob)))
        fm = ui.formatter(b'debugobsolete', opts)
        # markers are sorted so the output is deterministic
        for raw in sorted(markers):
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, obsutil.marker(None, raw))
        fm.end()
418 418
419 419
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in binary ``data``"""
    pad = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), phasename))
428 428
429 429
def _quasirepr(thing):
    """return a stable, repr-like bytes rendering of ``thing``

    Mappings are rendered with their keys in sorted order so the output
    is deterministic; everything else falls back to ``repr``.
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
436 436
437 437
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    wanted = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when one or more types were given
        if wanted and part.type not in wanted:
            continue
        ui.write(
            (
                b'%s -- %s (mandatory: %r)\n'
                % (part.type, _quasirepr(part.params), part.mandatory)
            )
        )
        # a part has exactly one type, so these arms are mutually exclusive
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
460 460
461 461
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        # --spec only needs the bundle header, not its contents
        if spec:
            ui.write(b'%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
484 484
485 485
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(peer.capabilities()):
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        # always close the peer connection, even if listing failed
        peer.close()
505 505
506 506
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    # Either recompute the file-change information from the changeset, or
    # decode what was recorded as sidedata at commit time.
    changes = None
    if opts['compute']:
        changes = metadata.compute_all_files_changes(ctx)
    else:
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            changes = metadata.decode_files_sidedata(sd)

    if changes is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    for f in sorted(changes.touched):
        if f in changes.added:
            action = b"added"
        elif f in changes.removed:
            action = b"removed"
        elif f in changes.merged:
            action = b"merged"
        elif f in changes.salvaged:
            action = b"salvaged"
        else:
            action = b"touched"

        copy_parent = b""
        copy_source = b""
        if f in changes.copied_from_p1:
            copy_parent = b"p1"
            copy_source = changes.copied_from_p1[f]
        elif f in changes.copied_from_p2:
            copy_parent = b"p2"
            copy_source = changes.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
556 556
557 557
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    problems = verify.verifier(repo)._verify_dirstate()
    if problems:
        raise error.Abort(
            _(b"dirstate inconsistent with current parent's manifest")
        )
565 565
566 566
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists the configured styles; the default lists raw colors
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
579 579
580 580
def _debugdisplaycolor(ui):
    """print every color/effect known to the ui, one per line"""
    # Work on a copy so the caller's style table is left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for key, _value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.'):]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.'):]
    ui.write(_(b'available colors:\n'))

    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
597 597
598 598
def _debugdisplaystyle(ui):
    """print each configured style and the effects it maps to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect lists line up in a single column
    width = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            ui.write(b': ')
            padding = max(0, width - len(name))
            ui.write(b' ' * padding)
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
612 612
613 613
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # local renamed from `requirements`, which shadowed the `requirements`
    # module imported at the top of this file
    reqs, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
635 635
636 636
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A standalone index file was given: emit its DAG, labeling the
        # explicitly listed revisions as rN.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield 'n' (node) and 'l' (label) events for dagtextlines
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names, so labels come out per rev
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            # track the current branch so 'a' (annotation) events are only
            # emitted when the branch actually changes
            b = b"default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
706 706
707 707
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # -c/-m/--dir select a storage directly; the first positional argument
    # is then the revision, not a file name.
    storage_selected = any(
        opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')
    )
    if storage_selected:
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
725 725
726 726
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended widens the set of accepted input formats
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
745 745
746 746
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``p1``:        parent 1 revision number (for reference)
    :``p2``:        parent 2 revision number (for reference)
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base:  a full snapshot
                    - snap:  an intermediate snapshot
                    - p1:    a delta against the first parent
                    - p2:    a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta
                    - prev:  a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:     in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    # memoizes compressed chain size per rev, since chains share suffixes
    chain_size_cache = {}

    def revinfo(rev):
        # Gather per-revision statistics from the index entry.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # Classify how this revision's delta was computed (see docstring).
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without generaldelta a delta can only be against prev or full
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # Sum the chain's compressed sizes, reusing any cached suffix total.
        chain = r._deltachain(rev)[0]
        chain_size = 0
        for iter_rev in reversed(chain):
            cached = chain_size_cache.get(iter_rev)
            if cached is not None:
                chain_size += cached
                break
            e = index[iter_rev]
            chain_size += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        chain_size_cache[rev] = chain_size

        return p1, p2, compsize, uncompsize, deltatype, chain, chain_size

    fm = ui.formatter(b'debugdeltachain', opts)

    # Plain-text column header (templated output bypasses this).
    fm.plain(
        b'    rev  p1  p2  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # a chain of length one has no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Compute how much disk data a sparse read of the chain touches.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
994 994
995 995
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # With a single positional argument, it is the revision and the default
    # storage (per -c/-m) is used; with two, the first names the file.
    if arg_2 is None:
        file_ = None
        rev = arg_1
    else:
        file_ = arg_1
        rev = arg_2

    rev = int(rev)

    # local renamed from `revlog`, which shadowed the `revlog` module
    # imported at the top of this file.
    # NOTE(review): the command name passed to openrevlog reads
    # b'debugdeltachain'; it only affects error messages, but it looks like
    # a copy-paste remnant — confirm before changing the user-visible string.
    rlog = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    p1r, p2r = rlog.parentrevs(rev)

    # Map --source to the base revision fed to the delta search.
    if source == b'full':
        base_rev = nullrev
    elif source == b'storage':
        base_rev = rlog.deltaparent(rev)
    elif source == b'p1':
        base_rev = p1r
    elif source == b'p2':
        base_rev = p2r
    elif source == b'prev':
        base_rev = rev - 1
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, rlog, rev, base_rev=base_rev)
1053 1053
1054 1054
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 metadata file instead of the entries.
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) overrides --dates and forces mtimes off.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) when --datesort is requested
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        if mtime == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit; otherwise show the permission bits
        # masked by the process umask.
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1142 1142
1143 1143
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # The ignore-pattern hash occupies the tail of the tree metadata.
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1158 1158
1159 1159
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # Normal case: talk to an actual remote peer.
        path = urlutil.get_unique_pull_path_obj(
            b'debugdiscovery', ui, remoteurl
        )
        branches = (path.branch, [])
        remote = hg.peer(repo, opts, path)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
    else:
        # --remote-as-revs: use the local repository, filtered down to the
        # requested revisions through a registered repoview filter, as the
        # "remote" side.
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: hide part of the local repository from discovery.
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` collects audit information from the discovery run plus the
    # statistics computed below.
    data = {}
    if opts.get(b'old'):
        # Legacy (tree) discovery protocol.

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # Modern set-discovery protocol.

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # Machine-readable output: capture anything written during the run
        # so it can be embedded in the formatted result instead.

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # common and missing partition the repository exactly.
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
    # The per-step counters below are only present when the discovery
    # implementation recorded them in the audit data.
    if b'total-round-trips-heads' in data:
        fm.plain(
            b" round-trips-heads: %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b" round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b" round-trips-between: %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries: %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b" queries-branches: %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b" queries-between: %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
    fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
    fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
    fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
    fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
    fm.plain(b" common: %(nb-revs-common)9d\n" % data)
    fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
    fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
    fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
    fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
    fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1441 1441
1442 1442
# read/write buffer size (4 KiB) used by debugdownload below
_chunksize = 4 << 10
1444 1444
1445 1445
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The body is streamed in _chunksize pieces to the file named by
    --output, or to the ui when no output file is given.
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # Always release the source handle; it was previously leaked when
        # only `dest` got closed.
        fh.close()
1468 1468
1469 1469
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    loaded = extensions.extensions(ui)
    hgversion = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for name, module in sorted(loaded, key=operator.itemgetter(0)):
        internal = extensions.ismoduleinternal(module)

        # Figure out where the extension was loaded from, when possible.
        source = None
        if util.safehasattr(module, '__file__'):
            source = pycompat.fsencode(module.__file__)
        elif getattr(sys, 'oxidized', False):
            source = pycompat.sysexecutable

        if internal:
            testedwith = []  # never expose magic string to users
        else:
            testedwith = getattr(module, 'testedwith', b'').split()
        buglink = getattr(module, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', name)
        else:
            fm.write(b'name', b'%s', name)
            # Annotate the name with the compatibility status.
            if internal or hgversion in testedwith:
                fm.plain(b'\n')
            elif not testedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                fm.plain(b' (%s!)\n' % testedwith[-1])

        fm.condwrite(
            ui.verbose and source,
            b'source',
            _(b' location: %s\n'),
            source or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][internal])
        fm.data(bundled=internal)

        fm.condwrite(
            ui.verbose and testedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(testedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and buglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            buglink or b"",
        )

    fm.end()
1531 1531
1532 1532
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # Parsing pipeline; each stage's tree can be dumped with --show-stage.
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # Collect candidate file names to run the matcher against.
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1628 1628
1629 1629
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report builds a report and is exclusive with applying one.
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?
    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=opts.get('dry_run'),
        to_report=opts.get('to_report'),
        from_report=opts.get('from_report'),
        paranoid=opts.get('paranoid'),
    )
1702 1702
1703 1703
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Pad variant names so the value columns line up.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # byte strings are shown verbatim, everything else as yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels highlighting mismatches between repo, config and
        # Mercurial's default value.
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1774 1774
1775 1775
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean capability as the bytes 'yes'/'no'
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # Probe with a throwaway file; may fail on read-only paths.
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1798 1798
1799 1799
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # TODO: get desired bundlecaps from command line.
    args = {'bundlecaps': None}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    bundle = repo.getbundle(b'debug', **args)

    # Map the user-facing compression name to the on-disk bundle type.
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(opts.get(b'type', b'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1846 1846
1847 1847
@command(b'debugignore', [], b'[FILE]...')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # No argument: dump the combined ignore matcher itself.
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = ignoredata = None
        if nf != b'.':
            if ignore(nf):
                # the file itself matches an ignore rule
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # otherwise check whether a parent directory is ignored
                for d in pathutil.finddirs(nf):
                    if ignore(d):
                        ignored = d
                        ignoredata = repo.dirstate._ignorefileandline(d)
                        break
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1896 1896
1897 1897
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog

    Operates on the changelog (-c), the manifest (-m) or the filelog of
    FILE, and renders the index through the requested formatter.
    """
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # Some storage objects wrap an inner revlog in `_revlog`; fall back to
    # the store itself when there is no wrapper. Use a str attribute name,
    # consistent with the rest of this module (builtin getattr() does not
    # accept bytes attribute names).
    revlog = getattr(store, '_revlog', store)

    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=revlog,
        full_node=ui.debugflag,
    )
1919 1919
1920 1920
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    byteopts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, byteopts)
    ui.writenoi18n(b"digraph G {\n")
    # emit one "parent -> child" edge per stored parent; a null second
    # parent is omitted from the graph
    for rev in store:
        parents = store.parents(store.node(rev))
        ui.write(b"\t%d -> %d\n" % (store.rev(parents[0]), rev))
        if parents[1] != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(parents[1]), rev))
    ui.write(b"}\n")
1939 1939
1940 1940
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Exercise the index so the native implementation populates its stats.
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # stats() only exists on the compiled index implementations. Use a str
    # attribute name for consistency with the other safehasattr/getattr
    # calls in this module (builtin getattr() requires str names).
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for k, v in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (k, v))
1950 1950
1951 1951
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # count of detected problems; doubles as the command's return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # frozen (oxidized) builds have no os.__file__; report the binary
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        # actually import the compiled modules to surface load failures
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    # a missing default 'vi' is only a warning, not a problem
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let loaded extensions contribute their own install checks
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2250 2250
2251 2251
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    byteopts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, byteopts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # one digit per queried node, in input order
    digits = [b"1" if known else b"0" for known in flags]
    ui.write(b"%s\n" % b"".join(digits))
2265 2265
2266 2266
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # plain forwarder to the modern name-completion command
    return debugnamecomplete(ui, repo, *args)
2271 2271
2272 2272
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: delete the lock file(s) outright, then stop
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire and hold until interrupted
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        # always drop any lock we managed to acquire, even on abort
        release(*locks)

    # default mode: report current lock holders
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it ourselves, so nobody else was holding it
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # lock file disappeared between the failed acquire and here
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2395 2395
2396 2396
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # the fulltext cache only exists on the default revlog-backed
        # manifest storage; abort cleanly for other implementations
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # no action requested: display the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2470 2470
2471 2471
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # default human-readable rendering of the merge state records
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two commits being merged (local/other), with optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras for files that are not (or no longer) in the merge state
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2579 2579
2580 2580
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # collect candidates from every namespace except branches; branches are
    # handled separately below because only *open* branches are listed
    candidates = set()
    for nsname, ns in repo.names.items():
        if nsname != b'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    # with no arguments, complete against the empty prefix (i.e. everything)
    prefixes = args if args else [b'']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2603 2603
2604 2604
@command(
    b'debugnodemap',
    (
        cmdutil.debugrevlogopts
        + [
            (
                b'',
                b'dump-new',
                False,
                _(b'write a (new) persistent binary nodemap on stdout'),
            ),
            (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
            (
                b'',
                b'check',
                False,
                _(b'check that the data on disk data are correct.'),
            ),
            (
                b'',
                b'metadata',
                False,
                _(b'display the on disk meta data for the nodemap'),
            ),
        ]
    ),
    _(b'-c|-m|FILE'),
)
def debugnodemap(ui, repo, file_=None, **opts):
    """write and inspect on disk nodemap"""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if file_ is not None:
            raise error.InputError(
                _(b'cannot specify a file with other arguments')
            )
    elif file_ is None:
        # default to the changelog when nothing was selected
        opts['changelog'] = True
    r = cmdutil.openstorage(
        repo.unfiltered(), b'debugnodemap', file_, pycompat.byteskwargs(opts)
    )
    # manifest and filelog storages wrap the actual revlog; unwrap it so the
    # index (and its nodemap data) is reachable. Single isinstance() with a
    # tuple replaces the redundant `isinstance(...) or isinstance(...)` pair.
    if isinstance(r, (manifest.manifestrevlog, filelog.filelog)):
        r = r._revlog
    if opts['dump_new']:
        if util.safehasattr(r.index, "nodemap_data_all"):
            data = r.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(r.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, r.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2674 2674
2675 2675
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by index and return early
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a new marker precursor -> successors
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                # tr.release() aborts the transaction unless close() ran
                tr.release()
        finally:
            l.release()
    else:
        # display mode
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2825 2825
2826 2826
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # one "source -> destination" line per copy record, in storage order
    copies_map = ctx.p1copies()
    for dest in copies_map:
        ui.write(b'%s -> %s\n' % (copies_map[dest], dest))
2839 2839
2840 2840
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # Resolve the revision to inspect; without -r/--rev this falls back
    # to the working directory context (default=None).
    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'), default=None)
    # Emit one "source -> destination" line per copy recorded against p2.
    copymap = ctx.p2copies()
    for destination in copymap:
        ui.write(b'%s -> %s\n' % (copymap[destination], destination))
2853 2853
2854 2854
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completion candidates for `path`, keeping
        # only dirstate entries whose state letter is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # A spec pointing outside the repository cannot be completed.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative; dirstate keys are always
        # slash-separated, so normalize the separator where os.sep != '/'.
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        # Bind the bound methods once; the loop below visits every
        # dirstate entry.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next separator and offer
                # the directory prefix instead of the whole path.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate state letters from the
    # -n/-a/-r flags; an empty selection means "all" (b'nmar' below).
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    # Union the candidates over all specs and print them cwd-relative.
    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2923 2923
2924 2924
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then restrict the copy trace to the
    # requested file patterns (matched against the first revision).
    source_ctx = scmutil.revsingle(repo, rev1)
    dest_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(source_ctx, pats, opts)
    copy_map = copies.pathcopies(source_ctx, dest_ctx, matcher)
    for destination, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2938 2938
2939 2939
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Peer request logging is forced on here; it only becomes visible
    # when the command is run with --debug.
    with ui.configoverride({(b'devel', b'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

    try:
        is_local = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if is_local else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        peer.close()
2963 2963
2964 2964
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is applied as a transient ui.forcemerge override so that
    # the normal tool-selection machinery sees it in the usual place.
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        # With -v, echo the inputs that can short-circuit merge-patterns
        # matching (HGMERGE and ui.merge), if they are set.
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # _picktool may emit warnings while matching; suppress them
            # unless --debug was requested.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3049 3049
3050 3050
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for k, v in sorted(target.listkeys(namespace).items()):
                escaped = (stringutil.escapestr(k), stringutil.escapestr(v))
                ui.write(b"%s\t%s\n" % escaped)
            return None
        # Update mode: conditionally move the key from `old` to `new`.
        key, old, new = keyinfo
        with target.commandexecutor() as executor:
            outcome = executor.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(outcome) + b'\n')
        # Exit status 0 on success, 1 on failure.
        return not outcome
    finally:
        target.close()
3086 3086
3087 3087
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors of two revisions

    Prints both pvecs, their depths, and the delta/hamming
    distance between them together with the detected relation
    (=, >, <, | or ? when no relation could be established).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Previously `rel` was left unassigned when none of the four
        # comparisons matched, raising NameError below; report an
        # unknown relation instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3114 3114
3115 3115
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        # Rebuilding inside an open transaction would race with it.
        if repo.currenttransaction() is not None:
            msg = b'rebuild the dirstate outside of a transaction'
            raise error.ProgrammingError(msg)
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            # Only rebuild the entries that disagree between the manifest
            # and the dirstate: files present in exactly one of the two,
            # excluding dirstate-only files that are marked as added.
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        # changedfiles=None means "rebuild everything".
        with dirstate.changing_parents(repo):
            dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3167 3167
3168 3168
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # --only-data restricts the scan to filelog data (.d) files.
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3185 3185
3186 3186
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    byteopts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, byteopts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, byteopts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() returns (source path, source filenode) or False.
        origin = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not origin:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, origin[0], hex(origin[1]))
            )
3206 3206
3207 3207
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3213 3213
3214 3214
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    byteopts = pycompat.byteskwargs(opts)
    rl = cmdutil.openrevlog(repo, b'debugrevlog', file_, byteopts)

    # -d/--dump prints the raw index data; otherwise print statistics.
    if byteopts.get(b"dump"):
        revlog_debug.dump(ui, rl)
    else:
        revlog_debug.debug_revlog(ui, rl)
    return 0
3231 3231
3232 3232
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Two legacy dump layouts are supported; -f selects between them.
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full 40-char hashes, otherwise short ones.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    # Width of a node id column, measured from the first revision.
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Print the column header matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One row per revision; columns depend on format and verbosity.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the index is damaged.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not nodes.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3346 3346
3347 3347
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset compilation pipeline, in order; each stage transforms
    # the tree produced by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Decide which stages' trees to print: `showalways` unconditionally,
    # `showchanged` only when the tree differs from the last one printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree for later
    # verification, and print the requested stages along the way.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and optimized trees and diff the
        # resulting revision lists; exit 1 if they disagree.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Print a unified-diff-style comparison of the two rev lists.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3479 3479
3480 3480
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # Optional file handle to which all server I/O is mirrored.
    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Serve over this process's stdin/stdout; blocks until EOF.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3529 3529
3530 3530
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # A missing second revision means "no second parent" (nullid).
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3558 3558
3559 3559
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    byteopts = pycompat.byteskwargs(opts)
    wants_storage_flag = (
        byteopts.get(b'changelog')
        or byteopts.get(b'manifest')
        or byteopts.get(b'dir')
    )
    if wants_storage_flag:
        # With -c/-m/--dir the positional argument is the revision.
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, byteopts)
    # Unwrap to the underlying revlog when the storage object has one.
    store = getattr(store, '_revlog', store)
    try:
        sidedata = store.sidedata(store.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        entries = sorted(sidedata.items())
        ui.writenoi18n(b'%d sidedata entries\n' % len(entries))
        for key, value in entries:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3588 3588
3589 3589
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated in Python 3.7 and removed in 3.12;
    # build an unverified client context instead. Verification is
    # intentionally disabled: we only need the peer's certificate in order
    # to (re)build its chain, not a validated connection.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # Fetch the peer certificate in DER form for the Windows API.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3659 3659
3660 3660
@command(
    b'debug::stable-tail-sort',
    [
        (
            b'T',
            b'template',
            b'{rev}\n',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
    ],
    b'REV',
)
def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
    """display the stable-tail sort of the ancestors of a given node"""
    target_rev = logcmdutil.revsingle(repo, revspec).rev()

    # Render each ancestor, in stable-tail order, through the template.
    templ = logcmdutil.maketemplater(ui, repo, template)
    ordered = stabletailsort._stable_tail_sort_naive(repo.changelog, target_rev)
    for ancestor_rev in ordered:
        templ.show(repo[ancestor_rev])
3683 3683
3684 3684
@command(
    b'debug::stable-tail-sort-leaps',
    [
        (
            b'T',
            b'template',
            b'{rev}',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
        (b's', b'specific', False, _(b'restrict to specific leaps')),
    ],
    b'REV',
)
def debug_stable_tail_sort_leaps(ui, repo, rspec, template, specific, **opts):
    """display the leaps in the stable-tail sort of a node, one per line"""
    target_rev = logcmdutil.revsingle(repo, rspec).rev()

    # -s/--specific restricts the search to "specific" leaps only.
    leap_finder = (
        stabletailsort._find_specific_leaps_naive
        if specific
        else stabletailsort._find_all_leaps_naive
    )

    # Each leap is printed as its source and target revisions followed
    # by a blank line.
    templ = logcmdutil.maketemplater(ui, repo, template)
    for leap_source, leap_target in leap_finder(repo.changelog, target_rev):
        templ.show(repo[leap_source])
        templ.show(repo[leap_target])
        ui.write(b'\n')
3713 3713
3714 3714
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect all strip-backup bundle files, newest (by mtime) first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    # These two options are consumed by bundlerepo.getremotechanges() below;
    # we never supply an extra bundle file and never force.
    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    # Render up to ``limit`` changesets from ``chlist``, honoring the
    # --newest-first and --no-merges log options captured via ``opts``.
    def display(other, chlist, displayer):
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do when the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        path = urlutil.get_unique_pull_path_obj(
            b'debugbackupbundle',
            ui,
            source,
        )
        try:
            other = hg.peer(repo, opts, path)
        except error.LookupError as ex:
            # The bundle may reference parents this repo no longer has
            # (e.g. after a deeper strip); warn and move on to the next one.
            msg = _(b"\nwarning: unable to open bundle %s") % path.loc
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        branches = (path.branch, opts.get(b'branch', []))
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming machinery; we only care about the result.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle that contains the wanted node,
                # then stop scanning further backups.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, path.loc)
                        gen = exchange.readbundle(ui, f, path.loc)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + path.loc,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
                        break
            else:
                # Listing mode: show the bundle's mtime as a heading, then
                # either the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(path.loc)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, opts, False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            # Always drop the temporary bundle repository state.
            cleanupfn()
3854 3854
3855 3855
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump every subrepository entry of the revision's substate: the
    # subrepo path followed by its source and pinned revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for subpath, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % subpath)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3867 3867
3868 3868
@command(
    b'debugshell',
    [
        (
            b'c',
            b'command',
            b'',
            _(b'program passed in as a string'),
            _(b'COMMAND'),
        )
    ],
    _(b'[-c COMMAND]'),
    optionalrepo=True,
)
def debugshell(ui, repo, **opts):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Namespace exposed to the interpreter / one-shot command.
    namespace = {
        'ui': ui,
        'repo': repo,
    }

    # py2exe disables initialization of the site module, which is responsible
    # for arranging for ``quit()`` to exit the interpreter. Manually initialize
    # the stuff that site normally does here, so that the interpreter can be
    # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
    # py.exe, or py2exe.
    if getattr(sys, "frozen", None) == 'console_exe':
        try:
            import site

            site.setcopyright()
            site.sethelper()
            site.setquit()
        except ImportError:
            site = None  # Keep PyCharm happy

    command = opts.get('command')
    if not command:
        # No -c given: drop into an interactive session.
        code.interact(local=namespace)
        return

    # One-shot mode: compile and run the supplied program string.
    compiled = code.compile_command(encoding.strfromlocal(command))
    code.InteractiveInterpreter(locals=namespace).runcode(compiled)
3918 3918
3919 3919
@command(
    b'debug-revlog-stats',
    [
        (b'c', b'changelog', None, _(b'Display changelog statistics')),
        (b'm', b'manifest', None, _(b'Display manifest statistics')),
        (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
    ]
    + cmdutil.formatteropts,
)
def debug_revlog_stats(ui, repo, **opts):
    """display statistics about revlogs in the store"""
    opts = pycompat.byteskwargs(opts)
    selection = [opts[b"changelog"], opts[b"manifest"], opts[b"filelogs"]]
    # When no revlog class is explicitly selected, report on all of them.
    if all(flag is None for flag in selection):
        selection = [True, True, True]
    changelog, manifest, filelogs = selection

    # Operate on the unfiltered view of the repository.
    repo = repo.unfiltered()
    fm = ui.formatter(b'debug-revlog-stats', opts)
    revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
    fm.end()
3945 3945
3946 3946
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so results are computed once.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # An empty set (pruned changeset) still produces a blank line.
            if succsset:
                ui.write(b' ')
                ui.write(short(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(short(node))
            ui.write(b'\n')
4001 4001
4002 4002
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        tagsnode = cache.getfnode(node, computemissing=False)
        # Three cases: a cached .hgtags filenode (possibly unknown to the
        # filelog), no cache entry at all (None), or an invalid entry.
        if tagsnode:
            display = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                display += b' (unknown node)'
        elif tagsnode is None:
            display = b'missing'
        else:
            display = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), display))
4021 4021
4022 4022
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions; 'ui' is reserved and an empty key,
    # a reserved key, or a missing '=' are all rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = d.split(b'=', 1)
            k = k.strip()
            v = v.strip()
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parse tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    def _showsymbols(t):
        # With --verbose, list the keywords and functions the template uses.
        kwds, funcs = t.symbolsuseddefault()
        ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
        ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))

    if revs is None:
        # Generic template: render once with the default resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            _showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            _showsymbols(displayer.t)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4086 4086
4087 4087
4088 4088 @command(
4089 4089 b'debuguigetpass',
4090 4090 [
4091 4091 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4092 4092 ],
4093 4093 _(b'[-p TEXT]'),
4094 4094 norepo=True,
4095 4095 )
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may yield None (e.g. no input available); substitute a
    # visible placeholder so the echo line is always printed.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4102 4102
4103 4103
4104 4104 @command(
4105 4105 b'debuguiprompt',
4106 4106 [
4107 4107 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
4108 4108 ],
4109 4109 _(b'[-p TEXT]'),
4110 4110 norepo=True,
4111 4111 )
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the prompt returned, verbatim.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4116 4116
4117 4117
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks while rebuilding, then
    # ask the repository to refresh every cache category it knows about.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4123 4123
4124 4124
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Deduplicate the requested optimizations and hand everything off to
    # the upgrade machinery.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4174 4174
4175 4175
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Only rewrite separators to '/' when the user enabled ui.slash on a
    # platform whose native separator is not already '/'.
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        displaypath = util.normpath
    else:

        def displaypath(fn):
            return fn

    # Size the columns to the longest repo-relative and cwd-relative paths
    # so the output stays aligned. (Generators avoid building throwaway
    # lists; the former variable name ``abs`` shadowed the builtin.)
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(f) for f in items),
        max(len(repo.pathto(f)) for f in items),
    )
    for f in items:
        line = fmt % (
            f,
            displaypath(repo.pathto(f)),
            b'exact' if m.exact(f) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4202 4202
4203 4203
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Divergent changesets carry the set of competing successors;
        # render them as "hash (phase)" pairs before the reason.
        divergent = entry.get(b'divergentnodes')
        if divergent:
            descriptions = [
                b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
            ]
            dnodes = b' '.join(descriptions) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4221 4221
4222 4222
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # The remote-connection options were consumed by hg.peer() above
        # and must not be forwarded as wire command arguments.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Only forward options the user actually set.
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4253 4253
4254 4254
4255 4255 def _parsewirelangblocks(fh):
4256 4256 activeaction = None
4257 4257 blocklines = []
4258 4258 lastindent = 0
4259 4259
4260 4260 for line in fh:
4261 4261 line = line.rstrip()
4262 4262 if not line:
4263 4263 continue
4264 4264
4265 4265 if line.startswith(b'#'):
4266 4266 continue
4267 4267
4268 4268 if not line.startswith(b' '):
4269 4269 # New block. Flush previous one.
4270 4270 if activeaction:
4271 4271 yield activeaction, blocklines
4272 4272
4273 4273 activeaction = line
4274 4274 blocklines = []
4275 4275 lastindent = 0
4276 4276 continue
4277 4277
4278 4278 # Else we start with an indent.
4279 4279
4280 4280 if not activeaction:
4281 4281 raise error.Abort(_(b'indented line outside of block'))
4282 4282
4283 4283 indent = len(line) - len(line.lstrip())
4284 4284
4285 4285 # If this line is indented more than the last line, concatenate it.
4286 4286 if indent > lastindent and blocklines:
4287 4287 blocklines[-1] += line.lstrip()
4288 4288 else:
4289 4289 blocklines.append(line)
4290 4290 lastindent = indent
4291 4291
4292 4292 # Flush last block.
4293 4293 if activeaction:
4294 4294 yield activeaction, blocklines
4295 4295
4296 4296
4297 4297 @command(
4298 4298 b'debugwireproto',
4299 4299 [
4300 4300 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4301 4301 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4302 4302 (
4303 4303 b'',
4304 4304 b'noreadstderr',
4305 4305 False,
4306 4306 _(b'do not read from stderr of the remote'),
4307 4307 ),
4308 4308 (
4309 4309 b'',
4310 4310 b'nologhandshake',
4311 4311 False,
4312 4312 _(b'do not log I/O related to the peer handshake'),
4313 4313 ),
4314 4314 ]
4315 4315 + cmdutil.remoteopts,
4316 4316 _(b'[PATH]'),
4317 4317 optionalrepo=True,
4318 4318 )
4319 4319 def debugwireproto(ui, repo, path=None, **opts):
4320 4320 """send wire protocol commands to a server
4321 4321
4322 4322 This command can be used to issue wire protocol commands to remote
4323 4323 peers and to debug the raw data being exchanged.
4324 4324
4325 4325 ``--localssh`` will start an SSH server against the current repository
4326 4326 and connect to that. By default, the connection will perform a handshake
4327 4327 and establish an appropriate peer instance.
4328 4328
4329 4329 ``--peer`` can be used to bypass the handshake protocol and construct a
4330 4330 peer instance using the specified class type. Valid values are ``raw``,
4331 4331 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4332 4332 don't support higher-level command actions.
4333 4333
4334 4334 ``--noreadstderr`` can be used to disable automatic reading from stderr
4335 4335 of the peer (for SSH connections only). Disabling automatic reading of
4336 4336 stderr is useful for making output more deterministic.
4337 4337
4338 4338 Commands are issued via a mini language which is specified via stdin.
4339 4339 The language consists of individual actions to perform. An action is
4340 4340 defined by a block. A block is defined as a line with no leading
4341 4341 space followed by 0 or more lines with leading space. Blocks are
4342 4342 effectively a high-level command with additional metadata.
4343 4343
4344 4344 Lines beginning with ``#`` are ignored.
4345 4345
4346 4346 The following sections denote available actions.
4347 4347
4348 4348 raw
4349 4349 ---
4350 4350
4351 4351 Send raw data to the server.
4352 4352
4353 4353 The block payload contains the raw data to send as one atomic send
4354 4354 operation. The data may not actually be delivered in a single system
4355 4355 call: it depends on the abilities of the transport being used.
4356 4356
4357 4357 Each line in the block is de-indented and concatenated. Then, that
4358 4358 value is evaluated as a Python b'' literal. This allows the use of
4359 4359 backslash escaping, etc.
4360 4360
4361 4361 raw+
4362 4362 ----
4363 4363
4364 4364 Behaves like ``raw`` except flushes output afterwards.
4365 4365
4366 4366 command <X>
4367 4367 -----------
4368 4368
4369 4369 Send a request to run a named command, whose name follows the ``command``
4370 4370 string.
4371 4371
4372 4372 Arguments to the command are defined as lines in this block. The format of
4373 4373 each line is ``<key> <value>``. e.g.::
4374 4374
4375 4375 command listkeys
4376 4376 namespace bookmarks
4377 4377
4378 4378 If the value begins with ``eval:``, it will be interpreted as a Python
4379 4379 literal expression. Otherwise values are interpreted as Python b'' literals.
4380 4380 This allows sending complex types and encoding special byte sequences via
4381 4381 backslash escaping.
4382 4382
4383 4383 The following arguments have special meaning:
4384 4384
4385 4385 ``PUSHFILE``
4386 4386 When defined, the *push* mechanism of the peer will be used instead
4387 4387 of the static request-response mechanism and the content of the
4388 4388 file specified in the value of this argument will be sent as the
4389 4389 command payload.
4390 4390
4391 4391 This can be used to submit a local bundle file to the remote.
4392 4392
4393 4393 batchbegin
4394 4394 ----------
4395 4395
4396 4396 Instruct the peer to begin a batched send.
4397 4397
4398 4398 All ``command`` blocks are queued for execution until the next
4399 4399 ``batchsubmit`` block.
4400 4400
4401 4401 batchsubmit
4402 4402 -----------
4403 4403
4404 4404 Submit previously queued ``command`` blocks as a batch request.
4405 4405
4406 4406 This action MUST be paired with a ``batchbegin`` action.
4407 4407
4408 4408 httprequest <method> <path>
4409 4409 ---------------------------
4410 4410
4411 4411 (HTTP peer only)
4412 4412
4413 4413 Send an HTTP request to the peer.
4414 4414
4415 4415 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4416 4416
4417 4417 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4418 4418 headers to add to the request. e.g. ``Accept: foo``.
4419 4419
4420 4420 The following arguments are special:
4421 4421
4422 4422 ``BODYFILE``
4423 4423 The content of the file defined as the value to this argument will be
4424 4424 transferred verbatim as the HTTP request body.
4425 4425
4426 4426 ``frame <type> <flags> <payload>``
4427 4427 Send a unified protocol frame as part of the request body.
4428 4428
4429 4429 All frames will be collected and sent as the body to the HTTP
4430 4430 request.
4431 4431
4432 4432 close
4433 4433 -----
4434 4434
4435 4435 Close the connection to the server.
4436 4436
4437 4437 flush
4438 4438 -----
4439 4439
4440 4440 Flush data written to the server.
4441 4441
4442 4442 readavailable
4443 4443 -------------
4444 4444
4445 4445 Close the write end of the connection and read all available data from
4446 4446 the server.
4447 4447
4448 4448 If the connection to the server encompasses multiple pipes, we poll both
4449 4449 pipes and read available data.
4450 4450
4451 4451 readline
4452 4452 --------
4453 4453
4454 4454 Read a line of output from the server. If there are multiple output
4455 4455 pipes, reads only the main pipe.
4456 4456
4457 4457 ereadline
4458 4458 ---------
4459 4459
4460 4460 Like ``readline``, but read from the stderr pipe, if available.
4461 4461
4462 4462 read <X>
4463 4463 --------
4464 4464
4465 4465 ``read()`` N bytes from the server's main output pipe.
4466 4466
4467 4467 eread <X>
4468 4468 ---------
4469 4469
4470 4470 ``read()`` N bytes from the server's stderr pipe, if available.
4471 4471
4472 4472 Specifying Unified Frame-Based Protocol Frames
4473 4473 ----------------------------------------------
4474 4474
4475 4475 It is possible to emit a *Unified Frame-Based Protocol* by using special
4476 4476 syntax.
4477 4477
4478 4478 A frame is composed as a type, flags, and payload. These can be parsed
4479 4479 from a string of the form:
4480 4480
4481 4481 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4482 4482
4483 4483 ``request-id`` and ``stream-id`` are integers defining the request and
4484 4484 stream identifiers.
4485 4485
4486 4486 ``type`` can be an integer value for the frame type or the string name
4487 4487 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4488 4488 ``command-name``.
4489 4489
4490 4490 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4491 4491 components. Each component (and there can be just one) can be an integer
4492 4492 or a flag name for stream flags or frame flags, respectively. Values are
4493 4493 resolved to integers and then bitwise OR'd together.
4494 4494
4495 4495 ``payload`` represents the raw frame payload. If it begins with
4496 4496 ``cbor:``, the following string is evaluated as Python code and the
4497 4497 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4498 4498 as a Python byte string literal.
4499 4499 """
4500 4500 opts = pycompat.byteskwargs(opts)
4501 4501
4502 4502 if opts[b'localssh'] and not repo:
4503 4503 raise error.Abort(_(b'--localssh requires a repository'))
4504 4504
4505 4505 if opts[b'peer'] and opts[b'peer'] not in (
4506 4506 b'raw',
4507 4507 b'ssh1',
4508 4508 ):
4509 4509 raise error.Abort(
4510 4510 _(b'invalid value for --peer'),
4511 4511 hint=_(b'valid values are "raw" and "ssh1"'),
4512 4512 )
4513 4513
4514 4514 if path and opts[b'localssh']:
4515 4515 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4516 4516
4517 4517 if ui.interactive():
4518 4518 ui.write(_(b'(waiting for commands on stdin)\n'))
4519 4519
4520 4520 blocks = list(_parsewirelangblocks(ui.fin))
4521 4521
4522 4522 proc = None
4523 4523 stdin = None
4524 4524 stdout = None
4525 4525 stderr = None
4526 4526 opener = None
4527 4527
4528 4528 if opts[b'localssh']:
4529 4529 # We start the SSH server in its own process so there is process
4530 4530 # separation. This prevents a whole class of potential bugs around
4531 4531 # shared state from interfering with server operation.
4532 4532 args = procutil.hgcmd() + [
4533 4533 b'-R',
4534 4534 repo.root,
4535 4535 b'debugserve',
4536 4536 b'--sshstdio',
4537 4537 ]
4538 4538 proc = subprocess.Popen(
4539 4539 pycompat.rapply(procutil.tonativestr, args),
4540 4540 stdin=subprocess.PIPE,
4541 4541 stdout=subprocess.PIPE,
4542 4542 stderr=subprocess.PIPE,
4543 4543 bufsize=0,
4544 4544 )
4545 4545
4546 4546 stdin = proc.stdin
4547 4547 stdout = proc.stdout
4548 4548 stderr = proc.stderr
4549 4549
4550 4550 # We turn the pipes into observers so we can log I/O.
4551 4551 if ui.verbose or opts[b'peer'] == b'raw':
4552 4552 stdin = util.makeloggingfileobject(
4553 4553 ui, proc.stdin, b'i', logdata=True
4554 4554 )
4555 4555 stdout = util.makeloggingfileobject(
4556 4556 ui, proc.stdout, b'o', logdata=True
4557 4557 )
4558 4558 stderr = util.makeloggingfileobject(
4559 4559 ui, proc.stderr, b'e', logdata=True
4560 4560 )
4561 4561
4562 4562 # --localssh also implies the peer connection settings.
4563 4563
4564 4564 url = b'ssh://localserver'
4565 4565 autoreadstderr = not opts[b'noreadstderr']
4566 4566
4567 4567 if opts[b'peer'] == b'ssh1':
4568 4568 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4569 4569 peer = sshpeer.sshv1peer(
4570 4570 ui,
4571 4571 url,
4572 4572 proc,
4573 4573 stdin,
4574 4574 stdout,
4575 4575 stderr,
4576 4576 None,
4577 4577 autoreadstderr=autoreadstderr,
4578 4578 )
4579 4579 elif opts[b'peer'] == b'raw':
4580 4580 ui.write(_(b'using raw connection to peer\n'))
4581 4581 peer = None
4582 4582 else:
4583 4583 ui.write(_(b'creating ssh peer from handshake results\n'))
4584 4584 peer = sshpeer._make_peer(
4585 4585 ui,
4586 4586 url,
4587 4587 proc,
4588 4588 stdin,
4589 4589 stdout,
4590 4590 stderr,
4591 4591 autoreadstderr=autoreadstderr,
4592 4592 )
4593 4593
4594 4594 elif path:
4595 4595 # We bypass hg.peer() so we can proxy the sockets.
4596 4596 # TODO consider not doing this because we skip
4597 4597 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4598 4598 u = urlutil.url(path)
4599 4599 if u.scheme != b'http':
4600 4600 raise error.Abort(_(b'only http:// paths are currently supported'))
4601 4601
4602 4602 url, authinfo = u.authinfo()
4603 4603 openerargs = {
4604 4604 'useragent': b'Mercurial debugwireproto',
4605 4605 }
4606 4606
4607 4607 # Turn pipes/sockets into observers so we can log I/O.
4608 4608 if ui.verbose:
4609 4609 openerargs.update(
4610 4610 {
4611 4611 'loggingfh': ui,
4612 4612 'loggingname': b's',
4613 4613 'loggingopts': {
4614 4614 'logdata': True,
4615 4615 'logdataapis': False,
4616 4616 },
4617 4617 }
4618 4618 )
4619 4619
4620 4620 if ui.debugflag:
4621 4621 openerargs['loggingopts']['logdataapis'] = True
4622 4622
4623 4623 # Don't send default headers when in raw mode. This allows us to
4624 4624 # bypass most of the behavior of our URL handling code so we can
4625 4625 # have near complete control over what's sent on the wire.
4626 4626 if opts[b'peer'] == b'raw':
4627 4627 openerargs['sendaccept'] = False
4628 4628
4629 4629 opener = urlmod.opener(ui, authinfo, **openerargs)
4630 4630
4631 4631 if opts[b'peer'] == b'raw':
4632 4632 ui.write(_(b'using raw connection to peer\n'))
4633 4633 peer = None
4634 4634 elif opts[b'peer']:
4635 4635 raise error.Abort(
4636 4636 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4637 4637 )
4638 4638 else:
4639 4639 peer_path = urlutil.try_path(ui, path)
4640 4640 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4641 4641
4642 4642 # We /could/ populate stdin/stdout with sock.makefile()...
4643 4643 else:
4644 4644 raise error.Abort(_(b'unsupported connection configuration'))
4645 4645
4646 4646 batchedcommands = None
4647 4647
4648 4648 # Now perform actions based on the parsed wire language instructions.
4649 4649 for action, lines in blocks:
4650 4650 if action in (b'raw', b'raw+'):
4651 4651 if not stdin:
4652 4652 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4653 4653
4654 4654 # Concatenate the data together.
4655 4655 data = b''.join(l.lstrip() for l in lines)
4656 4656 data = stringutil.unescapestr(data)
4657 4657 stdin.write(data)
4658 4658
4659 4659 if action == b'raw+':
4660 4660 stdin.flush()
4661 4661 elif action == b'flush':
4662 4662 if not stdin:
4663 4663 raise error.Abort(_(b'cannot call flush on this peer'))
4664 4664 stdin.flush()
4665 4665 elif action.startswith(b'command'):
4666 4666 if not peer:
4667 4667 raise error.Abort(
4668 4668 _(
4669 4669 b'cannot send commands unless peer instance '
4670 4670 b'is available'
4671 4671 )
4672 4672 )
4673 4673
4674 4674 command = action.split(b' ', 1)[1]
4675 4675
4676 4676 args = {}
4677 4677 for line in lines:
4678 4678 # We need to allow empty values.
4679 4679 fields = line.lstrip().split(b' ', 1)
4680 4680 if len(fields) == 1:
4681 4681 key = fields[0]
4682 4682 value = b''
4683 4683 else:
4684 4684 key, value = fields
4685 4685
4686 4686 if value.startswith(b'eval:'):
4687 4687 value = stringutil.evalpythonliteral(value[5:])
4688 4688 else:
4689 4689 value = stringutil.unescapestr(value)
4690 4690
4691 4691 args[key] = value
4692 4692
4693 4693 if batchedcommands is not None:
4694 4694 batchedcommands.append((command, args))
4695 4695 continue
4696 4696
4697 4697 ui.status(_(b'sending %s command\n') % command)
4698 4698
4699 4699 if b'PUSHFILE' in args:
4700 4700 with open(args[b'PUSHFILE'], 'rb') as fh:
4701 4701 del args[b'PUSHFILE']
4702 4702 res, output = peer._callpush(
4703 4703 command, fh, **pycompat.strkwargs(args)
4704 4704 )
4705 4705 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4706 4706 ui.status(
4707 4707 _(b'remote output: %s\n') % stringutil.escapestr(output)
4708 4708 )
4709 4709 else:
4710 4710 with peer.commandexecutor() as e:
4711 4711 res = e.callcommand(command, args).result()
4712 4712
4713 4713 ui.status(
4714 4714 _(b'response: %s\n')
4715 4715 % stringutil.pprint(res, bprefix=True, indent=2)
4716 4716 )
4717 4717
4718 4718 elif action == b'batchbegin':
4719 4719 if batchedcommands is not None:
4720 4720 raise error.Abort(_(b'nested batchbegin not allowed'))
4721 4721
4722 4722 batchedcommands = []
4723 4723 elif action == b'batchsubmit':
4724 4724 # There is a batching API we could go through. But it would be
4725 4725 # difficult to normalize requests into function calls. It is easier
4726 4726 # to bypass this layer and normalize to commands + args.
4727 4727 ui.status(
4728 4728 _(b'sending batch with %d sub-commands\n')
4729 4729 % len(batchedcommands)
4730 4730 )
4731 4731 assert peer is not None
4732 4732 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4733 4733 ui.status(
4734 4734 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4735 4735 )
4736 4736
4737 4737 batchedcommands = None
4738 4738
4739 4739 elif action.startswith(b'httprequest '):
4740 4740 if not opener:
4741 4741 raise error.Abort(
4742 4742 _(b'cannot use httprequest without an HTTP peer')
4743 4743 )
4744 4744
4745 4745 request = action.split(b' ', 2)
4746 4746 if len(request) != 3:
4747 4747 raise error.Abort(
4748 4748 _(
4749 4749 b'invalid httprequest: expected format is '
4750 4750 b'"httprequest <method> <path>'
4751 4751 )
4752 4752 )
4753 4753
4754 4754 method, httppath = request[1:]
4755 4755 headers = {}
4756 4756 body = None
4757 4757 frames = []
4758 4758 for line in lines:
4759 4759 line = line.lstrip()
4760 4760 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4761 4761 if m:
4762 4762 # Headers need to use native strings.
4763 4763 key = pycompat.strurl(m.group(1))
4764 4764 value = pycompat.strurl(m.group(2))
4765 4765 headers[key] = value
4766 4766 continue
4767 4767
4768 4768 if line.startswith(b'BODYFILE '):
4769 4769 with open(line.split(b' ', 1), b'rb') as fh:
4770 4770 body = fh.read()
4771 4771 elif line.startswith(b'frame '):
4772 4772 frame = wireprotoframing.makeframefromhumanstring(
4773 4773 line[len(b'frame ') :]
4774 4774 )
4775 4775
4776 4776 frames.append(frame)
4777 4777 else:
4778 4778 raise error.Abort(
4779 4779 _(b'unknown argument to httprequest: %s') % line
4780 4780 )
4781 4781
4782 4782 url = path + httppath
4783 4783
4784 4784 if frames:
4785 4785 body = b''.join(bytes(f) for f in frames)
4786 4786
4787 4787 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4788 4788
4789 4789 # urllib.Request insists on using has_data() as a proxy for
4790 4790 # determining the request method. Override that to use our
4791 4791 # explicitly requested method.
4792 4792 req.get_method = lambda: pycompat.sysstr(method)
4793 4793
4794 4794 try:
4795 4795 res = opener.open(req)
4796 4796 body = res.read()
4797 4797 except util.urlerr.urlerror as e:
4798 4798 # read() method must be called, but only exists in Python 2
4799 4799 getattr(e, 'read', lambda: None)()
4800 4800 continue
4801 4801
4802 4802 ct = res.headers.get('Content-Type')
4803 4803 if ct == 'application/mercurial-cbor':
4804 4804 ui.write(
4805 4805 _(b'cbor> %s\n')
4806 4806 % stringutil.pprint(
4807 4807 cborutil.decodeall(body), bprefix=True, indent=2
4808 4808 )
4809 4809 )
4810 4810
4811 4811 elif action == b'close':
4812 4812 assert peer is not None
4813 4813 peer.close()
4814 4814 elif action == b'readavailable':
4815 4815 if not stdout or not stderr:
4816 4816 raise error.Abort(
4817 4817 _(b'readavailable not available on this peer')
4818 4818 )
4819 4819
4820 4820 stdin.close()
4821 4821 stdout.read()
4822 4822 stderr.read()
4823 4823
4824 4824 elif action == b'readline':
4825 4825 if not stdout:
4826 4826 raise error.Abort(_(b'readline not available on this peer'))
4827 4827 stdout.readline()
4828 4828 elif action == b'ereadline':
4829 4829 if not stderr:
4830 4830 raise error.Abort(_(b'ereadline not available on this peer'))
4831 4831 stderr.readline()
4832 4832 elif action.startswith(b'read '):
4833 4833 count = int(action.split(b' ', 1)[1])
4834 4834 if not stdout:
4835 4835 raise error.Abort(_(b'read not available on this peer'))
4836 4836 stdout.read(count)
4837 4837 elif action.startswith(b'eread '):
4838 4838 count = int(action.split(b' ', 1)[1])
4839 4839 if not stderr:
4840 4840 raise error.Abort(_(b'eread not available on this peer'))
4841 4841 stderr.read(count)
4842 4842 else:
4843 4843 raise error.Abort(_(b'unknown action: %s') % action)
4844 4844
4845 4845 if batchedcommands is not None:
4846 4846 raise error.Abort(_(b'unclosed "batchbegin" request'))
4847 4847
4848 4848 if peer:
4849 4849 peer.close()
4850 4850
4851 4851 if proc:
4852 4852 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now