##// END OF EJS Templates
debugbackupbundle: migrate `opts` to native kwargs
Matt Harbison -
r51862:b28f794f default
parent child Browse files
Show More
@@ -1,4842 +1,4841 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 open,
37 37 )
38 38 from . import (
39 39 bundle2,
40 40 bundlerepo,
41 41 changegroup,
42 42 cmdutil,
43 43 color,
44 44 context,
45 45 copies,
46 46 dagparser,
47 47 dirstateutils,
48 48 encoding,
49 49 error,
50 50 exchange,
51 51 extensions,
52 52 filelog,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 manifest,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 requirements,
75 75 revlog,
76 76 revset,
77 77 revsetlang,
78 78 scmutil,
79 79 setdiscovery,
80 80 simplemerge,
81 81 sshpeer,
82 82 sslutil,
83 83 streamclone,
84 84 strip,
85 85 tags as tagsmod,
86 86 templater,
87 87 treediscovery,
88 88 upgrade,
89 89 url as urlmod,
90 90 util,
91 91 verify,
92 92 vfs as vfsmod,
93 93 wireprotoframing,
94 94 wireprotoserver,
95 95 )
96 96 from .interfaces import repository
97 97 from .stabletailgraph import stabletailsort
98 98 from .utils import (
99 99 cborutil,
100 100 compression,
101 101 dateutil,
102 102 procutil,
103 103 stringutil,
104 104 urlutil,
105 105 )
106 106
107 107 from .revlogutils import (
108 108 constants as revlog_constants,
109 109 debug as revlog_debug,
110 110 deltas as deltautil,
111 111 nodemap,
112 112 rewrite,
113 113 sidedata,
114 114 )
115 115
# Re-exported here because callers historically imported `release` from this
# module rather than from lockmod.
release = lockmod.release

# Command table for the debug* commands.  It is seeded with the commands
# declared by the strip module so they get registered alongside the local
# ones, then `command` registers every @command-decorated function below.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
122 122
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog.
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = store.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repository.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(ancestor), hex(ancestor)))
142 142
143 143
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # vfs paths are bytes throughout Mercurial; a native str here would be
    # joined against the bytes vfs base and fail on Python 3, so use a
    # bytes literal for both the create and the unlink path.
    with repo.cachevfs.open(b'eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    util.unlink(repo.cachevfs.join(b'eicar-test-file.com'))
159 159
160 160
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle at the given path and replay it into the local repo.
    bundlefile = hg.openpath(ui, fname)
    unbundler = exchange.readbundle(ui, bundlefile, fname)
    unbundler.apply(repo)
167 167
168 168
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    # Refuse to write over existing history unless --from-existing was given.
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG: first pass over the parsed DAG only
    # counts node events, for progress reporting and file sizing below
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev number of the most recently committed node
        atbranch = b'default'  # branch applied to subsequent commits
        nodeids = []  # maps DAG id -> committed node id (for backrefs)
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the shared file from
                        # both parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # touch this revision's dedicated line so each rev
                    # changes the file in a cleanly mergeable way
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry the second parent's nf* files over so merges
                        # keep every per-rev file alive
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file content from the dict
                    # built above; None means "file absent"
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag event: collected now, written out after the loop
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
352 352
353 353
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    # Dump changegroup 'gen'.  With 'all' truthy, every delta of every
    # section (changelog, manifest, then one section per filelog) is listed;
    # otherwise only the changelog node ids are printed.
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print one line per delta in the current section of the stream.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # Sections come in a fixed order in the stream; each header must be
        # consumed before iterating that section's deltas.
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
393 393
394 394
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unreadable marker format: report it and bail out.
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
        return
    ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
    fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
    for rawmarker in sorted(markers):
        fm.startitem()
        fm.plain(prefix)
        cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
    fm.end()
416 416
417 417
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in 'data', one per line"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
426 426
427 427
def _quasirepr(thing):
    """repr-like byte string; mappings are rendered with sorted keys so the
    output is deterministic."""
    mapping_types = (dict, util.sortdict, collections.OrderedDict)
    if isinstance(thing, mapping_types):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
434 434
435 435
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only show parts whose type the user asked for.
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == b'changegroup':
            # Decode the embedded changegroup using the advertised version
            # (defaulting to '01' when the part carries no version param).
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
458 458
459 459
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        if spec:
            # Only report the bundlespec, without dumping the contents.
            ui.write(b'%s\n' % exchange.getbundlespec(ui, fh))
            return

        unbundler = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(unbundler, bundle2.unbundle20):
            return _debugbundle2(ui, unbundler, all=all, **opts)
        _debugchangegroup(ui, unbundler, all=all, **opts)
482 482
483 483
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    peer = hg.peer(ui, pycompat.byteskwargs(opts), path)
    try:
        # Query capabilities first so a failure aborts before any output.
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in sorted(caps):
            ui.write(b' %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b' %s\n' % key)
                for value in values:
                    ui.write(b' %s\n' % value)
    finally:
        peer.close()
502 502
503 503
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # Recompute the files-changed information from the context itself.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the precomputed information from changelog sidedata, if any.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classify how this file changed in the revision.
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # Record copy information (which parent it was copied from, and
            # the source path) when the file is a copy.
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
553 553
554 554
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # Delegate the actual check to the verifier; abort on any reported error.
    if verify.verifier(repo)._verify_dirstate():
        raise error.Abort(
            _(b"dirstate inconsistent with current parent's manifest")
        )
562 562
563 563
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured style labels, otherwise list raw colors.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
576 576
577 577
def _debugdisplaycolor(ui):
    # List every known color/effect name, each rendered with itself as the
    # label so the terminal shows the actual effect.
    ui = ui.copy()  # work on a copy so the caller's styles are untouched
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: color.* / terminfo.* config entries define extra
        # labels; strip the section prefix for display.
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
594 594
595 595
def _debugdisplaystyle(ui):
    """List every configured style label together with its effects."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # Pad after each label so the effect lists line up in one column.
    width = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            ui.write(b': ')
            padding = max(0, width - len(name))
            ui.write(b' ' * padding)
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
609 609
610 610
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.

    This command creates a "version 1" stream clone, which is deprecated in
    favor of newer versions of the stream protocol. Bundles using such newer
    versions can be generated using the `hg bundle` command.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    reqs, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
636 636
637 637
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # A standalone index file was given: walk that revlog directly.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) per revision, plus an
            # ('l', (rev, label)) event for every rev the user listed.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged rev to its tag names for labeling below.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit a branch annotation whenever the branch changes.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
707 707
708 708
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # With -c/-m/--dir the single positional argument is the revision, not
    # a file path.
    uses_storage_flag = any(
        opts.get(flag) for flag in ('changelog', 'manifest', 'dir')
    )
    if uses_storage_flag:
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    storage = cmdutil.openstorage(
        repo, b'debugdata', file_, pycompat.byteskwargs(opts)
    )
    try:
        ui.write(storage.rawdata(storage.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
727 727
728 728
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With --extended, also try the extended set of date formats.
    extra_formats = (dateutil.extendeddateformats,) if opts["extended"] else ()
    parsed = dateutil.parsedate(date, *extra_formats)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
747 747
748 748
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
                    - base:  a full snapshot
                    - snap:  an intermediate snapshot
                    - p1:    a delta against the first parent
                    - p2:    a delta against the second parent
                    - skip1: a delta against the same base as p1
                             (when p1 has empty delta)
                    - skip2: a delta against the same base as p2
                             (when p2 has empty delta)
                    - prev:  a delta against the previous revision
                    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    r = cmdutil.openrevlog(
        repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
    )
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    # memoizes the compressed size of each rev's whole delta chain
    chain_size_cache = {}

    def revinfo(rev):
        # Gather the per-revision statistics needed for one output row.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to
        # delta against that parent, but directly against the delta base of
        # that parent (recursively). It avoids adding a useless entry in the
        # chain.
        #
        # However we need to detect that as a special case for delta-type,
        # that is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # Classify the delta (see the deltatype keyword in the docstring).
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta the base is always the previous rev
            # (or the rev itself for a full snapshot).
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        # Sum compressed sizes along the chain, reusing any cached suffix.
        chain_size = 0
        for iter_rev in reversed(chain):
            cached = chain_size_cache.get(iter_rev)
            if cached is not None:
                chain_size += cached
                break
            e = index[iter_rev]
            chain_size += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        chain_size_cache[rev] = chain_size

        return p1, p2, compsize, uncompsize, deltatype, chain, chain_size

    fm = ui.formatter(b'debugdeltachain', pycompat.byteskwargs(opts))

    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chain base rev -> small sequential chain id for display
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous rev in the chain
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the whole chain to measure how much
            # data would actually be read from disk and in how many hunks.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
997 997
998 998
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with a the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    # One positional arg means "REV only"; two mean "FILE REV".
    if arg_2 is None:
        file_, rev = None, arg_1
    else:
        file_, rev = arg_1, arg_2

    rev = int(rev)

    # NOTE(review): the operation name handed to openrevlog reads
    # b'debugdeltachain'; presumably copied from that command — confirm
    # whether error text should mention debug-delta-find instead.
    revlog = cmdutil.openrevlog(
        repo, b'debugdeltachain', file_, pycompat.byteskwargs(opts)
    )
    p1r, p2r = revlog.parentrevs(rev)

    # Map each --source value to the delta base fed into the search;
    # 'storage' needs the revlog and is handled separately.
    static_bases = {
        b'full': nullrev,
        b'p1': p1r,
        b'p2': p2r,
        b'prev': rev - 1,
    }
    if source == b'storage':
        base_rev = revlog.deltaparent(rev)
    elif source in static_bases:
        base_rev = static_bases[source]
    else:
        raise error.InputError(b"invalid --source value: %s" % source)

    revlog_debug.debug_delta_find(ui, revlog, rev, base_rev=base_rev)
1057 1057
1058 1058
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --docket: dump the dirstate-v2 docket (metadata file) instead of the
    # entries, then return early.
    if opts.get("docket"):
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates (deprecated) overrides --dates and hides saved mtimes.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) instead of the default filename order
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # the time column is padded to the width of the strftime output below
        if mtime == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if mode & 0o20000:
            # symlinks are rendered as 'lnk' rather than an octal mode
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1146 1146
1147 1147
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # the ignore pattern hash is stored at the end of the tree metadata
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1162 1162
1163 1163
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)))

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is meant
      for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts["local_as_revs"]
    remote_revs = opts["remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts['seed']))

    if not remote_revs:
        # talk to a real (or default-path) remote peer
        path = urlutil.get_unique_pull_path_obj(
            b'debugdiscovery', ui, remoteurl
        )
        branches = (path.branch, [])
        remote = hg.peer(repo, pycompat.byteskwargs(opts), path)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
    else:
        # use the local repository itself, filtered down to the
        # --remote-as-revs subset, as the "remote" side
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # restrict the local side to the --local-as-revs subset
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get('old'):
        # legacy, pre-setdiscovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            if not hasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get('nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-discovery protocol

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']

    fm = ui.formatter(b'debugdiscovery', pycompat.byteskwargs(opts))
    if fm.strict_format:
        # machine-readable output: capture human output into the data dict

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:   %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b"  round-trips-heads:      %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b"  round-trips-branches:  %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b"  round-trips-between:   %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries:       %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b"  queries-branches:       %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b"  queries-between:        %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"      both:            %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1444 1444
1445 1445
# read/write buffer size (4 KiB) used by debugdownload below
_chunksize = 4 << 10
1447 1447
1448 1448
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The content is copied in `_chunksize` pieces to the --output path when
    one is given, otherwise to the ui. Both the source handle and the output
    file (when one was opened) are closed even if the copy fails.
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, b"wb", _chunksize)
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # the original leaked the source handle; urlmod.open returns a
        # file-like object, so close it explicitly
        fh.close()
1471 1471
1472 1472
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', pycompat.byteskwargs(opts))
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # figure out where the extension was loaded from (module file, or
        # the frozen executable for oxidized builds)
        if hasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            # annotate the name with the compatibility status against this
            # Mercurial version
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1533 1533
1534 1534
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize

    ctx = logcmdutil.revsingle(repo, opts.get('rev'), None)

    # the parsing pipeline; each stage transforms the previous stage's tree
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts['show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the stage header is omitted in the deprecated --verbose mode
            if opts['show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # gather the candidate file names the matcher will be tested against
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1630 1630
1631 1631
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # building a report (--to-report) is exclusive with applying one
    # (--from-report) and with --dry-run
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1704 1704
1705 1705
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    # width of the widest variant name, used to align the columns
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # "<name>:" padded to the common column width
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', pycompat.byteskwargs(opts))
    if fm.isplain():

        def formatvalue(value):
            # strings pass through unchanged; booleans render as yes/no
            if hasattr(value, 'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # color the name/repo columns by how the repo value compares to the
        # configured value and the Mercurial default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1775 1775
1776 1776
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a filesystem capability the way this command reports it
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    mountpoint = util.getfsmountpoint(path) or b'(unknown)'
    ui.writenoi18n(b'mounted on: %s\n' % mountpoint)
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probing case sensitivity needs a scratch file; failures (e.g. a
    # read-only location) simply leave the answer unknown
    case_report = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            case_report = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % case_report)
1799 1799
1800 1800
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    peer = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    # TODO: get desired bundlecaps from command line.
    args = {'bundlecaps': None}
    if common:
        args['common'] = [bin(node_id) for node_id in common]
    if head:
        args['heads'] = [bin(node_id) for node_id in head]
    bundle = peer.getbundle(b'debug', **args)

    # map the user-visible compression name to the on-disk bundle header
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get('type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1846 1846
1847 1847
@command(b'debugignore', [], b'[FILE]...')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    def matchinfo(nf):
        # return (matched path, (file, line, pattern)) for an ignored path,
        # checking the path itself first and then its parent directories;
        # (None, None) when nothing matches
        if nf != b'.':
            if ignore(nf):
                return nf, repo.dirstate._ignorefileandline(nf)
            for parent in pathutil.finddirs(nf):
                if ignore(parent):
                    return parent, repo.dirstate._ignorefileandline(parent)
        return None, None

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        ignored, ignoredata = matchinfo(util.normpath(f))
        if not ignored:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == util.normpath(f):
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), ignored)
            )
        ignorefile, lineno, line = ignoredata
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1896 1896
1897 1897
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # Unwrap storage objects (manifestrevlog/filelog wrappers) to the raw
    # revlog.  Named `rl` rather than `revlog` so the local does not shadow
    # the module-level `revlog` import.
    rl = getattr(store, '_revlog', store)

    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=rl,
        full_node=ui.debugflag,
    )
1919 1919
1920 1920
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    r = cmdutil.openstorage(
        repo, b'debugindexdot', file_, pycompat.byteskwargs(opts)
    )
    # Emit one "parent -> child" edge per parent; the second parent is
    # skipped when it is the null revision.
    ui.writenoi18n(b"digraph G {\n")
    for rev in r:
        p1, p2 = r.parents(r.node(rev))
        ui.write(b"\t%d -> %d\n" % (r.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (r.rev(p2), rev))
    ui.write(b"}\n")
1940 1940
1941 1941
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # Touch the index first (presumably ensures it is fully loaded —
    # this mirrors what the command has always done).
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not hasattr(index, 'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1951 1951
1952 1952
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Checks the encoding, the Python interpreter, compiled/Rust extension
    modules, compression engines, templates, the commit editor and the
    configured username, reporting each result through a formatter.

    Returns 0 on success.
    """
    # Running count of detected issues; doubles as the return value.
    problems = 0

    fm = ui.formatter(b'debuginstall', pycompat.byteskwargs(opts))
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    pythonlib = None
    if hasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        # Frozen builds have no os.__file__; report the executable instead.
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # The Rust extension is optional; probe for it without failing.
    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if hasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # NOTE: 'rustext' is rebound here from the probed module to a bool
    # derived from the module policy; only the policy matters below.
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util.has_re2():
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # Give loaded extensions a chance to run their own install checks.
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2249 2249
2250 2250
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, pycompat.byteskwargs(opts), repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # One flag per queried id, in input order.
    flags = peer.known([bin(node_id) for node_id in ids])
    digits = b"".join(b"1" if flag else b"0" for flag in flags)
    ui.write(b"%s\n" % digits)
2263 2263
2264 2264
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias kept for old completion scripts; forward everything.
    return debugnamecomplete(ui, repo, *args)
2269 2269
2270 2270
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: remove the lock files directly, bypassing the lock
    # machinery (this is what makes the options DANGEROUS), then exit.
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire (non-blocking) and hold until the
    # user answers the prompt or a signal arrives.
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    # Default mode: report who holds each lock (or that it is free).
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We acquired it, so nobody else held it: release and report free.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2393 2393
2394 2394
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # Fetch the fulltext cache from the root manifest storage; not
        # every revlog implementation exposes one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
            return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
            return

    # Neither --clear nor --add: display the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2468 2468
2469 2469
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        # With --verbose, compare the raw v1 and v2 on-disk records and
        # report which format is effectively used.
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    if not opts['template']:
        # Default template: merged commits, per-file state, then extras.
        opts['template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b' other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b' rename side: {rename_side}\n'
            b' renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % " extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', pycompat.byteskwargs(opts))
    fm.startitem()

    # "commits": the local and other sides of the merge (when active).
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # "files": one item per tracked merge entry, with the fields of the
    # state tuple unpacked according to the record type.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level "extras": entries for files not present in the merge state.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2576 2576
2577 2577
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # Branches get special treatment: historically only open branches
    # were listed, so the generic namespace listing skips them and the
    # branchmap loop below adds only the non-closed ones.
    for namespace, ns in repo.names.items():
        if namespace != b'branches':
            names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)

    prefixes = args if args else [b'']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in names if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2600 2600
2601 2601
@command(
    b'debugnodemap',
    (
        cmdutil.debugrevlogopts
        + [
            (
                b'',
                b'dump-new',
                False,
                _(b'write a (new) persistent binary nodemap on stdout'),
            ),
            (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
            (
                b'',
                b'check',
                False,
                _(b'check that the data on disk data are correct.'),
            ),
            (
                b'',
                b'metadata',
                False,
                _(b'display the on disk meta data for the nodemap'),
            ),
        ]
    ),
    _(b'-c|-m|FILE'),
)
def debugnodemap(ui, repo, file_=None, **opts):
    """write and inspect on disk nodemap"""
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # A revlog was selected by flag; a FILE argument would conflict.
        if file_ is not None:
            raise error.InputError(
                _(b'cannot specify a file with other arguments')
            )
    elif file_ is None:
        # No revlog selected at all: default to the changelog.
        opts['changelog'] = True
    r = cmdutil.openstorage(
        repo.unfiltered(), b'debugnodemap', file_, pycompat.byteskwargs(opts)
    )
    if isinstance(r, (manifest.manifestrevlog, filelog.filelog)):
        # Unwrap to the underlying revlog; its index is used below.
        r = r._revlog
    # The four modes below are mutually prioritized in this order.
    if opts['dump_new']:
        if hasattr(r.index, "nodemap_data_all"):
            data = r.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(r.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, r.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(r)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2671 2671
2672 2672
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove the markers at the given store indices.
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker PRECURSOR -> SUCCESSORS inside
        # a transaction.
        if opts['rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        # Fixed wording of this message ("cannot used" ->
                        # "cannot use").
                        raise error.Abort(
                            b'cannot use --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts['flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts['rev']:
            revs = logcmdutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts['exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', pycompat.byteskwargs(opts))
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2820 2820
2821 2821
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    # Print one "source -> destination" line per recorded copy.
    copymap = ctx.p1copies()
    for destination, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2833 2833
2834 2834
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # Resolve the requested revision; with no -r, fall back to the
    # working-directory context (default=None).
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    copymap = ctx.p2copies()
    for destination, origin in copymap.items():
        ui.write(b'%s -> %s\n' % (origin, destination))
2846 2846
2847 2847
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completion candidates for ``path``,
        # restricted to dirstate entries whose state letter is in
        # ``acceptable``.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # A spec pointing outside the repository cannot be completed.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec root-relative, matching how dirstate stores paths.
        spec = spec[len(rootdir) :]
        # The dirstate always uses b'/' separators; on platforms with a
        # different os separator, translate in both directions.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator: a match
                # with more segments left completes as a directory.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Translate -n/-a/-r filters into dirstate state letters.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # With no filter selected, accept every dirstate state.
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    # Directories and files are reported together, one per line, sorted.
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2916 2916
2917 2917
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints of the comparison.
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for destination, origin in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (origin, destination))
2931 2931
2932 2932
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    with ui.configoverride({(b'devel', b'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        peer.close()
2956 2956
2957 2957
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    overrides = {}
    if opts['tool']:
        # --tool takes precedence over every other source (rule 1 above);
        # apply it by overriding ui.forcemerge for the duration.
        overrides[(b'ui', b'forcemerge')] = opts['tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Silence _picktool()'s own chatter unless --debug is active,
            # so default output stays one "FILE = MERGETOOL" line per file.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3041 3041
3042 3042
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    peer = hg.peer(ui, {}, repopath)
    try:
        if not keyinfo:
            # Listing mode: dump every key/value pair in the namespace.
            for k, v in sorted(peer.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
        else:
            # Update mode: conditionally set key from old to new.
            key, old, new = keyinfo
            with peer.commandexecutor() as executor:
                r = executor.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            return not r
    finally:
        peer.close()
3078 3078
3079 3079
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors (pvecs) of two revisions and print how
    # they relate: "=" equal, ">"/"<" ancestor relation, "|" crossed.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    # NOTE(review): if none of the branches above matched, ``rel`` would be
    # unbound and the final write would raise NameError -- presumably the
    # four pvec relations are exhaustive; confirm against mercurial.pvec.
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3106 3106
3107 3107
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        # Rebuilding inside an open transaction would be unsafe; refuse.
        if repo.currenttransaction() is not None:
            msg = b'rebuild the dirstate outside of a transaction'
            raise error.ProgrammingError(msg)
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            # Restrict the rebuild to entries that disagree between the
            # manifest and the dirstate, leaving adds/removes untouched.
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        with dirstate.changing_parents(repo):
            dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3159 3159
3160 3160
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Delegate the actual work to the repair module; --only-data limits
    # the scan to revlog data files.
    only_data = opts.get("only_data")
    repair.rebuildfncache(ui, repo, only_data)
3176 3176
3177 3177
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    # Walk the matched files of the requested revision (or wdir parent).
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, pats, pycompat.byteskwargs(opts))
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, renamed[0], hex(renamed[1]))
            )
3196 3196
3197 3197
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, in sorted order.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
3203 3203
3204 3204
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    rlog = cmdutil.openrevlog(
        repo, b'debugrevlog', file_, pycompat.byteskwargs(opts)
    )

    # --dump prints the raw index data; otherwise emit the stats report.
    if opts.get("dump"):
        revlog_debug.dump(ui, rlog)
    else:
        revlog_debug.debug_revlog(ui, rlog)
    return 0
3222 3222
3223 3223
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    r = cmdutil.openrevlog(
        repo, b'debugrevlogindex', file_, pycompat.byteskwargs(opts)
    )
    # Only two index dump layouts are supported: 0 (classic) and 1.
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug shows full hex node ids, otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Width of the first node id determines the column width.
        idlen = len(shortfn(r.node(i)))
        break

    # Emit the header row matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    for i in r:
        node = r.node(i)
        if format == 0:
            # Format 0 reports parents as node ids.
            try:
                pp = r.parents(node)
            except Exception:
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers and adds flags.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3338 3338
3339 3339
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    aliases = ui.configitems(b'revsetalias')
    # The parsing pipeline: each stage transforms the tree produced by the
    # previous one, in this fixed order.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        # Drop the final (optimize) stage.
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Decide which stages to print: ``showalways`` unconditionally,
    # ``showchanged`` only when the stage altered the tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts['optimize']:
        showalways.add(b'optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts['show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the pipeline, keeping every intermediate tree for --verify-optimized.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and optimized trees and diff the
        # resulting revision sequences; exit 1 on any difference.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3470 3470
3471 3471
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    if not opts['sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # I/O logging destination: either an inherited fd or a file, not both.
    logfh = None

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts['logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), 'wb', 0)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], b'ab', 0)

    # Serve the SSH wire protocol over this process's stdio handles.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3518 3518
3519 3519
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly results in slight corruption of the file
    level histories withing your repository. DO NOT USE THIS COMMAND.

    The command update the p1 and p2 field in the dirstate, and not touching
    anything else. This useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only used if you are one of the few people that
    deeply unstand both conversion tools and file level histories. If you are
    reading this help, you are not one of this people (most of them sailed west
    from Mithlond anyway.

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Missing REV2 means "null": the result has a single parent.
    p1 = scmutil.revsingle(repo, rev1).node()
    p2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(p1, p2)
3547 3547
3548 3548
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    # With -c/-m/--dir, no FILE argument is expected; the positional value
    # actually holds the revision, so shift the arguments accordingly.
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.InputError(
                _(b'cannot specify a revision with other arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.InputError(_(b'please specify a revision'))
    r = cmdutil.openstorage(
        repo, b'debugdata', file_, pycompat.byteskwargs(opts)
    )
    # Unwrap to the underlying revlog when the storage object wraps one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Print entries in key order; dump values only with --verbose.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3578 3578
3579 3579
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # ssl.wrap_socket() was deprecated since Python 3.7 and removed in
    # Python 3.12; build an explicit client context instead. Certificate
    # verification is deliberately disabled: the point is to obtain the
    # peer's (possibly broken) chain so Windows Update can repair it.
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # DER-encoded peer certificate (binary form).
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First pass: only check the chain; second pass (build=True default)
        # asks Windows to fetch missing intermediates/roots.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3649 3649
3650 3650
@command(
    b'debug::stable-tail-sort',
    [
        (
            b'T',
            b'template',
            b'{rev}\n',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
    ],
    b'REV',
)
def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
    """display the stable-tail sort of the ancestors of a given node"""
    # Resolve the head revision and render each ancestor through the
    # requested template, in stable-tail sort order.
    head_rev = logcmdutil.revsingle(repo, revspec).rev()
    cl = repo.changelog

    displayer = logcmdutil.maketemplater(ui, repo, template)
    for ancestor_rev in stabletailsort._stable_tail_sort_naive(cl, head_rev):
        displayer.show(repo[ancestor_rev])
3673 3673
3674 3674
@command(
    b'debug::stable-tail-sort-leaps',
    [
        (
            b'T',
            b'template',
            b'{rev}',
            _(b'display with template'),
            _(b'TEMPLATE'),
        ),
        (b's', b'specific', False, _(b'restrict to specific leaps')),
    ],
    b'REV',
)
def debug_stable_tail_sort_leaps(ui, repo, rspec, template, specific, **opts):
    """display the leaps in the stable-tail sort of a node, one per line"""
    rev = logcmdutil.revsingle(repo, rspec).rev()

    # --specific restricts the output to the "specific" subset of leaps.
    if specific:
        leap_finder = stabletailsort._find_specific_leaps_naive
    else:
        leap_finder = stabletailsort._find_all_leaps_naive

    displayer = logcmdutil.maketemplater(ui, repo, template)
    for leap_source, leap_target in leap_finder(repo.changelog, rev):
        displayer.show(repo[leap_source])
        displayer.show(repo[leap_target])
        ui.write(b'\n')
3703 3703
3704 3704
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # ``opts`` is kept as a native (str-keyed) dict throughout; it is
    # converted with pycompat.byteskwargs() only at the boundaries that
    # still expect bytes-keyed options (getlimit, hg.peer,
    # changesetdisplayer).
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    # Most recent backups first.
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts["bundle"] = b""
    opts["force"] = None
    limit = logcmdutil.getlimit(pycompat.byteskwargs(opts))

    def display(other, chlist, displayer):
        # Render up to ``limit`` non-merge (unless allowed) changesets.
        if opts.get("newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get("no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get("recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        path = urlutil.get_unique_pull_path_obj(
            b'debugbackupbundle',
            ui,
            source,
        )
        try:
            other = hg.peer(repo, pycompat.byteskwargs(opts), path)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % path.loc
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        branches = (path.branch, opts.get('branch', []))
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get("rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts["bundle"], opts["force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Unbundle the first backup that contains the requested node.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, path.loc)
                        gen = exchange.readbundle(ui, f, path.loc)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + path.loc,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
                        break
            else:
                # Listing mode: print the backup's mtime header, then either
                # the bundle path (--verbose) or its changesets.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(path.loc)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
                else:
                    opts[
                        "template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                    displayer = logcmdutil.changesetdisplayer(
                        ui, other, pycompat.byteskwargs(opts), False
                    )
                    display(other, chlist, displayer)
                    displayer.close()
        finally:
            cleanupfn()
3844 3843
3845 3844
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Print the substate (path, source, revision) of every subrepo in REV.
    ctx = scmutil.revsingle(repo, rev, None)
    for subpath, substate in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % subpath)
        ui.writenoi18n(b' source %s\n' % substate[0])
        ui.writenoi18n(b' revision %s\n' % substate[1])
3857 3856
3858 3857
@command(
    b'debugshell',
    [
        (
            b'c',
            b'command',
            b'',
            _(b'program passed in as a string'),
            _(b'COMMAND'),
        )
    ],
    _(b'[-c COMMAND]'),
    optionalrepo=True,
)
def debugshell(ui, repo, **opts):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    local_ns = {
        'ui': ui,
        'repo': repo,
    }

    # py2exe disables initialization of the site module, which is responsible
    # for arranging for ``quit()`` to exit the interpreter. Manually initialize
    # the stuff that site normally does here, so that the interpreter can be
    # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
    # py.exe, or py2exe.
    if getattr(sys, "frozen", None) == 'console_exe':
        try:
            import site

            site.setcopyright()
            site.sethelper()
            site.setquit()
        except ImportError:
            site = None  # Keep PyCharm happy

    source = opts.get('command')
    if source:
        # -c: compile and run the one-off program instead of going
        # interactive.
        compiled = code.compile_command(encoding.strfromlocal(source))
        code.InteractiveInterpreter(locals=local_ns).runcode(compiled)
        return

    code.interact(local=local_ns)
3908 3907
3909 3908
@command(
    b'debug-revlog-stats',
    [
        (b'c', b'changelog', None, _(b'Display changelog statistics')),
        (b'm', b'manifest', None, _(b'Display manifest statistics')),
        (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
    ]
    + cmdutil.formatteropts,
)
def debug_revlog_stats(ui, repo, **opts):
    """display statistics about revlogs in the store"""
    opts = pycompat.byteskwargs(opts)
    # Which revlog categories were requested on the command line.
    changelog = opts[b"changelog"]
    manifest = opts[b"manifest"]
    filelogs = opts[b"filelogs"]

    # No explicit selector means "report on everything".
    if changelog is None and manifest is None and filelogs is None:
        changelog = manifest = filelogs = True

    # Operate on the unfiltered repository.
    repo = repo.unfiltered()
    fm = ui.formatter(b'debug-revlog-stats', opts)
    revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
    fm.end()
3935 3934
3936 3935
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # rendering helpers: a changectx is printed as bytes(ctx), a raw node
    # as its short hex form
    ctx2str = bytes
    node2str = short
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # one indented, space-separated line per successors set
            if succsset:
                ui.write(b'    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
3991 3990
3992 3991
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for r in repo:
        node = repo[r].node()
        tagsnode = cache.getfnode(node, computemissing=False)
        # Render the cached .hgtags filenode for this revision: its hex
        # (flagged when the filelog lacks it), "missing", or "invalid".
        if tagsnode is None:
            tagsnodedisplay = b'missing'
        elif tagsnode:
            tagsnodedisplay = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                tagsnodedisplay += b' (unknown node)'
        else:
            tagsnodedisplay = b'invalid'

        ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
4011 4010
4012 4011
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev needs a repository even though the command is repo-optional
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # collect -D KEY=VALUE definitions as extra template properties
    # ('ui' is reserved and empty keys are rejected)
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # show the raw parse tree and, when aliases change it, the
        # alias-expanded tree as well
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # generic template: render once with the -D properties
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # log template: render each changeset selected by --rev
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4076 4075
4077 4076
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    answer = ui.getpass(prompt)
    if answer is None:
        # distinguish "no input" from an empty password
        answer = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % answer)
4092 4091
4093 4092
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
4106 4105
4107 4106
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock while warming.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(caches=repository.CACHES_ALL)
4113 4112
4114 4113
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All the work happens in the upgrade module; the remaining keyword
    # options (the changelog/manifest/filelogs selectors) are forwarded
    # verbatim.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4164 4163
4165 4164
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(matcher), b'\n')
    walked = list(repo[None].walk(matcher))
    if not walked:
        return
    # With ui.slash on platforms whose separator is not '/', normalize the
    # displayed relative paths.
    display_path = lambda fn: fn
    if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
        display_path = lambda fn: util.normpath(fn)
    # Column widths are sized to the longest absolute and relative paths.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(name) for name in walked),
        max(len(repo.pathto(name)) for name in walked),
    )
    for name in walked:
        line = fmt % (
            name,
            display_path(repo.pathto(name)),
            matcher.exact(name) and b'exact' or b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4192 4191
4193 4192
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # "hex (phase)" for each divergent node, trailing space included
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in divergent
                )
                + b' '
            )
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4211 4210
4212 4211
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the wire-protocol argument passing against a remote peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options so only command arguments remain.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Forward only the options that were actually set.
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = repo.debugwireargs(*vals, **args)
        second = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        repo.close()
4243 4242
4244 4243
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini language into (action, lines) pairs.

    A block starts at an unindented line (the action) and collects the
    indented lines that follow it.  Blank lines and ``#`` comments are
    skipped.  An indented line that is deeper than its predecessor is a
    continuation and is concatenated onto the previous body line.
    """
    action = None
    body = []
    prev_indent = 0

    for raw in fh:
        stripped = raw.rstrip()

        # Blank lines and comments carry no content.
        if not stripped or stripped.startswith(b'#'):
            continue

        if not stripped.startswith(b' '):
            # Unindented line: a new block begins, so flush the old one.
            if action:
                yield action, body

            action = stripped
            body = []
            prev_indent = 0
            continue

        # From here on the line is indented and must belong to a block.
        if not action:
            raise error.Abort(_(b'indented line outside of block'))

        depth = len(stripped) - len(stripped.lstrip())

        # Deeper indentation than the last line continues that line.
        if depth > prev_indent and body:
            body[-1] += stripped.lstrip()
        else:
            body.append(stripped)
            prev_indent = depth

    # Flush the block still being accumulated at EOF.
    if action:
        yield action, body
4285 4284
4286 4285
4287 4286 @command(
4288 4287 b'debugwireproto',
4289 4288 [
4290 4289 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4291 4290 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4292 4291 (
4293 4292 b'',
4294 4293 b'noreadstderr',
4295 4294 False,
4296 4295 _(b'do not read from stderr of the remote'),
4297 4296 ),
4298 4297 (
4299 4298 b'',
4300 4299 b'nologhandshake',
4301 4300 False,
4302 4301 _(b'do not log I/O related to the peer handshake'),
4303 4302 ),
4304 4303 ]
4305 4304 + cmdutil.remoteopts,
4306 4305 _(b'[PATH]'),
4307 4306 optionalrepo=True,
4308 4307 )
4309 4308 def debugwireproto(ui, repo, path=None, **opts):
4310 4309 """send wire protocol commands to a server
4311 4310
4312 4311 This command can be used to issue wire protocol commands to remote
4313 4312 peers and to debug the raw data being exchanged.
4314 4313
4315 4314 ``--localssh`` will start an SSH server against the current repository
4316 4315 and connect to that. By default, the connection will perform a handshake
4317 4316 and establish an appropriate peer instance.
4318 4317
4319 4318 ``--peer`` can be used to bypass the handshake protocol and construct a
4320 4319 peer instance using the specified class type. Valid values are ``raw``,
4321 4320 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4322 4321 don't support higher-level command actions.
4323 4322
4324 4323 ``--noreadstderr`` can be used to disable automatic reading from stderr
4325 4324 of the peer (for SSH connections only). Disabling automatic reading of
4326 4325 stderr is useful for making output more deterministic.
4327 4326
4328 4327 Commands are issued via a mini language which is specified via stdin.
4329 4328 The language consists of individual actions to perform. An action is
4330 4329 defined by a block. A block is defined as a line with no leading
4331 4330 space followed by 0 or more lines with leading space. Blocks are
4332 4331 effectively a high-level command with additional metadata.
4333 4332
4334 4333 Lines beginning with ``#`` are ignored.
4335 4334
4336 4335 The following sections denote available actions.
4337 4336
4338 4337 raw
4339 4338 ---
4340 4339
4341 4340 Send raw data to the server.
4342 4341
4343 4342 The block payload contains the raw data to send as one atomic send
4344 4343 operation. The data may not actually be delivered in a single system
4345 4344 call: it depends on the abilities of the transport being used.
4346 4345
4347 4346 Each line in the block is de-indented and concatenated. Then, that
4348 4347 value is evaluated as a Python b'' literal. This allows the use of
4349 4348 backslash escaping, etc.
4350 4349
4351 4350 raw+
4352 4351 ----
4353 4352
4354 4353 Behaves like ``raw`` except flushes output afterwards.
4355 4354
4356 4355 command <X>
4357 4356 -----------
4358 4357
4359 4358 Send a request to run a named command, whose name follows the ``command``
4360 4359 string.
4361 4360
4362 4361 Arguments to the command are defined as lines in this block. The format of
4363 4362 each line is ``<key> <value>``. e.g.::
4364 4363
4365 4364 command listkeys
4366 4365 namespace bookmarks
4367 4366
4368 4367 If the value begins with ``eval:``, it will be interpreted as a Python
4369 4368 literal expression. Otherwise values are interpreted as Python b'' literals.
4370 4369 This allows sending complex types and encoding special byte sequences via
4371 4370 backslash escaping.
4372 4371
4373 4372 The following arguments have special meaning:
4374 4373
4375 4374 ``PUSHFILE``
4376 4375 When defined, the *push* mechanism of the peer will be used instead
4377 4376 of the static request-response mechanism and the content of the
4378 4377 file specified in the value of this argument will be sent as the
4379 4378 command payload.
4380 4379
4381 4380 This can be used to submit a local bundle file to the remote.
4382 4381
4383 4382 batchbegin
4384 4383 ----------
4385 4384
4386 4385 Instruct the peer to begin a batched send.
4387 4386
4388 4387 All ``command`` blocks are queued for execution until the next
4389 4388 ``batchsubmit`` block.
4390 4389
4391 4390 batchsubmit
4392 4391 -----------
4393 4392
4394 4393 Submit previously queued ``command`` blocks as a batch request.
4395 4394
4396 4395 This action MUST be paired with a ``batchbegin`` action.
4397 4396
4398 4397 httprequest <method> <path>
4399 4398 ---------------------------
4400 4399
4401 4400 (HTTP peer only)
4402 4401
4403 4402 Send an HTTP request to the peer.
4404 4403
4405 4404 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4406 4405
4407 4406 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4408 4407 headers to add to the request. e.g. ``Accept: foo``.
4409 4408
4410 4409 The following arguments are special:
4411 4410
4412 4411 ``BODYFILE``
4413 4412 The content of the file defined as the value to this argument will be
4414 4413 transferred verbatim as the HTTP request body.
4415 4414
4416 4415 ``frame <type> <flags> <payload>``
4417 4416 Send a unified protocol frame as part of the request body.
4418 4417
4419 4418 All frames will be collected and sent as the body to the HTTP
4420 4419 request.
4421 4420
4422 4421 close
4423 4422 -----
4424 4423
4425 4424 Close the connection to the server.
4426 4425
4427 4426 flush
4428 4427 -----
4429 4428
4430 4429 Flush data written to the server.
4431 4430
4432 4431 readavailable
4433 4432 -------------
4434 4433
4435 4434 Close the write end of the connection and read all available data from
4436 4435 the server.
4437 4436
4438 4437 If the connection to the server encompasses multiple pipes, we poll both
4439 4438 pipes and read available data.
4440 4439
4441 4440 readline
4442 4441 --------
4443 4442
4444 4443 Read a line of output from the server. If there are multiple output
4445 4444 pipes, reads only the main pipe.
4446 4445
4447 4446 ereadline
4448 4447 ---------
4449 4448
4450 4449 Like ``readline``, but read from the stderr pipe, if available.
4451 4450
4452 4451 read <X>
4453 4452 --------
4454 4453
4455 4454 ``read()`` N bytes from the server's main output pipe.
4456 4455
4457 4456 eread <X>
4458 4457 ---------
4459 4458
4460 4459 ``read()`` N bytes from the server's stderr pipe, if available.
4461 4460
4462 4461 Specifying Unified Frame-Based Protocol Frames
4463 4462 ----------------------------------------------
4464 4463
4465 4464 It is possible to emit a *Unified Frame-Based Protocol* by using special
4466 4465 syntax.
4467 4466
4468 4467 A frame is composed as a type, flags, and payload. These can be parsed
4469 4468 from a string of the form:
4470 4469
4471 4470 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4472 4471
4473 4472 ``request-id`` and ``stream-id`` are integers defining the request and
4474 4473 stream identifiers.
4475 4474
4476 4475 ``type`` can be an integer value for the frame type or the string name
4477 4476 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4478 4477 ``command-name``.
4479 4478
4480 4479 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4481 4480 components. Each component (and there can be just one) can be an integer
4482 4481 or a flag name for stream flags or frame flags, respectively. Values are
4483 4482 resolved to integers and then bitwise OR'd together.
4484 4483
4485 4484 ``payload`` represents the raw frame payload. If it begins with
4486 4485 ``cbor:``, the following string is evaluated as Python code and the
4487 4486 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4488 4487 as a Python byte string literal.
4489 4488 """
4490 4489 opts = pycompat.byteskwargs(opts)
4491 4490
4492 4491 if opts[b'localssh'] and not repo:
4493 4492 raise error.Abort(_(b'--localssh requires a repository'))
4494 4493
4495 4494 if opts[b'peer'] and opts[b'peer'] not in (
4496 4495 b'raw',
4497 4496 b'ssh1',
4498 4497 ):
4499 4498 raise error.Abort(
4500 4499 _(b'invalid value for --peer'),
4501 4500 hint=_(b'valid values are "raw" and "ssh1"'),
4502 4501 )
4503 4502
4504 4503 if path and opts[b'localssh']:
4505 4504 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4506 4505
4507 4506 if ui.interactive():
4508 4507 ui.write(_(b'(waiting for commands on stdin)\n'))
4509 4508
4510 4509 blocks = list(_parsewirelangblocks(ui.fin))
4511 4510
4512 4511 proc = None
4513 4512 stdin = None
4514 4513 stdout = None
4515 4514 stderr = None
4516 4515 opener = None
4517 4516
4518 4517 if opts[b'localssh']:
4519 4518 # We start the SSH server in its own process so there is process
4520 4519 # separation. This prevents a whole class of potential bugs around
4521 4520 # shared state from interfering with server operation.
4522 4521 args = procutil.hgcmd() + [
4523 4522 b'-R',
4524 4523 repo.root,
4525 4524 b'debugserve',
4526 4525 b'--sshstdio',
4527 4526 ]
4528 4527 proc = subprocess.Popen(
4529 4528 pycompat.rapply(procutil.tonativestr, args),
4530 4529 stdin=subprocess.PIPE,
4531 4530 stdout=subprocess.PIPE,
4532 4531 stderr=subprocess.PIPE,
4533 4532 bufsize=0,
4534 4533 )
4535 4534
4536 4535 stdin = proc.stdin
4537 4536 stdout = proc.stdout
4538 4537 stderr = proc.stderr
4539 4538
4540 4539 # We turn the pipes into observers so we can log I/O.
4541 4540 if ui.verbose or opts[b'peer'] == b'raw':
4542 4541 stdin = util.makeloggingfileobject(
4543 4542 ui, proc.stdin, b'i', logdata=True
4544 4543 )
4545 4544 stdout = util.makeloggingfileobject(
4546 4545 ui, proc.stdout, b'o', logdata=True
4547 4546 )
4548 4547 stderr = util.makeloggingfileobject(
4549 4548 ui, proc.stderr, b'e', logdata=True
4550 4549 )
4551 4550
4552 4551 # --localssh also implies the peer connection settings.
4553 4552
4554 4553 url = b'ssh://localserver'
4555 4554 autoreadstderr = not opts[b'noreadstderr']
4556 4555
4557 4556 if opts[b'peer'] == b'ssh1':
4558 4557 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4559 4558 peer = sshpeer.sshv1peer(
4560 4559 ui,
4561 4560 url,
4562 4561 proc,
4563 4562 stdin,
4564 4563 stdout,
4565 4564 stderr,
4566 4565 None,
4567 4566 autoreadstderr=autoreadstderr,
4568 4567 )
4569 4568 elif opts[b'peer'] == b'raw':
4570 4569 ui.write(_(b'using raw connection to peer\n'))
4571 4570 peer = None
4572 4571 else:
4573 4572 ui.write(_(b'creating ssh peer from handshake results\n'))
4574 4573 peer = sshpeer._make_peer(
4575 4574 ui,
4576 4575 url,
4577 4576 proc,
4578 4577 stdin,
4579 4578 stdout,
4580 4579 stderr,
4581 4580 autoreadstderr=autoreadstderr,
4582 4581 )
4583 4582
4584 4583 elif path:
4585 4584 # We bypass hg.peer() so we can proxy the sockets.
4586 4585 # TODO consider not doing this because we skip
4587 4586 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4588 4587 u = urlutil.url(path)
4589 4588 if u.scheme != b'http':
4590 4589 raise error.Abort(_(b'only http:// paths are currently supported'))
4591 4590
4592 4591 url, authinfo = u.authinfo()
4593 4592 openerargs = {
4594 4593 'useragent': b'Mercurial debugwireproto',
4595 4594 }
4596 4595
4597 4596 # Turn pipes/sockets into observers so we can log I/O.
4598 4597 if ui.verbose:
4599 4598 openerargs.update(
4600 4599 {
4601 4600 'loggingfh': ui,
4602 4601 'loggingname': b's',
4603 4602 'loggingopts': {
4604 4603 'logdata': True,
4605 4604 'logdataapis': False,
4606 4605 },
4607 4606 }
4608 4607 )
4609 4608
4610 4609 if ui.debugflag:
4611 4610 openerargs['loggingopts']['logdataapis'] = True
4612 4611
4613 4612 # Don't send default headers when in raw mode. This allows us to
4614 4613 # bypass most of the behavior of our URL handling code so we can
4615 4614 # have near complete control over what's sent on the wire.
4616 4615 if opts[b'peer'] == b'raw':
4617 4616 openerargs['sendaccept'] = False
4618 4617
4619 4618 opener = urlmod.opener(ui, authinfo, **openerargs)
4620 4619
4621 4620 if opts[b'peer'] == b'raw':
4622 4621 ui.write(_(b'using raw connection to peer\n'))
4623 4622 peer = None
4624 4623 elif opts[b'peer']:
4625 4624 raise error.Abort(
4626 4625 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4627 4626 )
4628 4627 else:
4629 4628 peer_path = urlutil.try_path(ui, path)
4630 4629 peer = httppeer._make_peer(ui, peer_path, opener=opener)
4631 4630
4632 4631 # We /could/ populate stdin/stdout with sock.makefile()...
4633 4632 else:
4634 4633 raise error.Abort(_(b'unsupported connection configuration'))
4635 4634
4636 4635 batchedcommands = None
4637 4636
4638 4637 # Now perform actions based on the parsed wire language instructions.
4639 4638 for action, lines in blocks:
4640 4639 if action in (b'raw', b'raw+'):
4641 4640 if not stdin:
4642 4641 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4643 4642
4644 4643 # Concatenate the data together.
4645 4644 data = b''.join(l.lstrip() for l in lines)
4646 4645 data = stringutil.unescapestr(data)
4647 4646 stdin.write(data)
4648 4647
4649 4648 if action == b'raw+':
4650 4649 stdin.flush()
4651 4650 elif action == b'flush':
4652 4651 if not stdin:
4653 4652 raise error.Abort(_(b'cannot call flush on this peer'))
4654 4653 stdin.flush()
4655 4654 elif action.startswith(b'command'):
4656 4655 if not peer:
4657 4656 raise error.Abort(
4658 4657 _(
4659 4658 b'cannot send commands unless peer instance '
4660 4659 b'is available'
4661 4660 )
4662 4661 )
4663 4662
4664 4663 command = action.split(b' ', 1)[1]
4665 4664
4666 4665 args = {}
4667 4666 for line in lines:
4668 4667 # We need to allow empty values.
4669 4668 fields = line.lstrip().split(b' ', 1)
4670 4669 if len(fields) == 1:
4671 4670 key = fields[0]
4672 4671 value = b''
4673 4672 else:
4674 4673 key, value = fields
4675 4674
4676 4675 if value.startswith(b'eval:'):
4677 4676 value = stringutil.evalpythonliteral(value[5:])
4678 4677 else:
4679 4678 value = stringutil.unescapestr(value)
4680 4679
4681 4680 args[key] = value
4682 4681
4683 4682 if batchedcommands is not None:
4684 4683 batchedcommands.append((command, args))
4685 4684 continue
4686 4685
4687 4686 ui.status(_(b'sending %s command\n') % command)
4688 4687
4689 4688 if b'PUSHFILE' in args:
4690 4689 with open(args[b'PUSHFILE'], 'rb') as fh:
4691 4690 del args[b'PUSHFILE']
4692 4691 res, output = peer._callpush(
4693 4692 command, fh, **pycompat.strkwargs(args)
4694 4693 )
4695 4694 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4696 4695 ui.status(
4697 4696 _(b'remote output: %s\n') % stringutil.escapestr(output)
4698 4697 )
4699 4698 else:
4700 4699 with peer.commandexecutor() as e:
4701 4700 res = e.callcommand(command, args).result()
4702 4701
4703 4702 ui.status(
4704 4703 _(b'response: %s\n')
4705 4704 % stringutil.pprint(res, bprefix=True, indent=2)
4706 4705 )
4707 4706
4708 4707 elif action == b'batchbegin':
4709 4708 if batchedcommands is not None:
4710 4709 raise error.Abort(_(b'nested batchbegin not allowed'))
4711 4710
4712 4711 batchedcommands = []
4713 4712 elif action == b'batchsubmit':
4714 4713 # There is a batching API we could go through. But it would be
4715 4714 # difficult to normalize requests into function calls. It is easier
4716 4715 # to bypass this layer and normalize to commands + args.
4717 4716 ui.status(
4718 4717 _(b'sending batch with %d sub-commands\n')
4719 4718 % len(batchedcommands)
4720 4719 )
4721 4720 assert peer is not None
4722 4721 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4723 4722 ui.status(
4724 4723 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4725 4724 )
4726 4725
4727 4726 batchedcommands = None
4728 4727
4729 4728 elif action.startswith(b'httprequest '):
4730 4729 if not opener:
4731 4730 raise error.Abort(
4732 4731 _(b'cannot use httprequest without an HTTP peer')
4733 4732 )
4734 4733
4735 4734 request = action.split(b' ', 2)
4736 4735 if len(request) != 3:
4737 4736 raise error.Abort(
4738 4737 _(
4739 4738 b'invalid httprequest: expected format is '
4740 4739 b'"httprequest <method> <path>'
4741 4740 )
4742 4741 )
4743 4742
4744 4743 method, httppath = request[1:]
4745 4744 headers = {}
4746 4745 body = None
4747 4746 frames = []
4748 4747 for line in lines:
4749 4748 line = line.lstrip()
4750 4749 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4751 4750 if m:
4752 4751 # Headers need to use native strings.
4753 4752 key = pycompat.strurl(m.group(1))
4754 4753 value = pycompat.strurl(m.group(2))
4755 4754 headers[key] = value
4756 4755 continue
4757 4756
4758 4757 if line.startswith(b'BODYFILE '):
4759 4758 with open(line.split(b' ', 1), b'rb') as fh:
4760 4759 body = fh.read()
4761 4760 elif line.startswith(b'frame '):
4762 4761 frame = wireprotoframing.makeframefromhumanstring(
4763 4762 line[len(b'frame ') :]
4764 4763 )
4765 4764
4766 4765 frames.append(frame)
4767 4766 else:
4768 4767 raise error.Abort(
4769 4768 _(b'unknown argument to httprequest: %s') % line
4770 4769 )
4771 4770
4772 4771 url = path + httppath
4773 4772
4774 4773 if frames:
4775 4774 body = b''.join(bytes(f) for f in frames)
4776 4775
4777 4776 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4778 4777
4779 4778 # urllib.Request insists on using has_data() as a proxy for
4780 4779 # determining the request method. Override that to use our
4781 4780 # explicitly requested method.
4782 4781 req.get_method = lambda: pycompat.sysstr(method)
4783 4782
4784 4783 try:
4785 4784 res = opener.open(req)
4786 4785 body = res.read()
4787 4786 except util.urlerr.urlerror as e:
4788 4787 # read() method must be called, but only exists in Python 2
4789 4788 getattr(e, 'read', lambda: None)()
4790 4789 continue
4791 4790
4792 4791 ct = res.headers.get('Content-Type')
4793 4792 if ct == 'application/mercurial-cbor':
4794 4793 ui.write(
4795 4794 _(b'cbor> %s\n')
4796 4795 % stringutil.pprint(
4797 4796 cborutil.decodeall(body), bprefix=True, indent=2
4798 4797 )
4799 4798 )
4800 4799
4801 4800 elif action == b'close':
4802 4801 assert peer is not None
4803 4802 peer.close()
4804 4803 elif action == b'readavailable':
4805 4804 if not stdout or not stderr:
4806 4805 raise error.Abort(
4807 4806 _(b'readavailable not available on this peer')
4808 4807 )
4809 4808
4810 4809 stdin.close()
4811 4810 stdout.read()
4812 4811 stderr.read()
4813 4812
4814 4813 elif action == b'readline':
4815 4814 if not stdout:
4816 4815 raise error.Abort(_(b'readline not available on this peer'))
4817 4816 stdout.readline()
4818 4817 elif action == b'ereadline':
4819 4818 if not stderr:
4820 4819 raise error.Abort(_(b'ereadline not available on this peer'))
4821 4820 stderr.readline()
4822 4821 elif action.startswith(b'read '):
4823 4822 count = int(action.split(b' ', 1)[1])
4824 4823 if not stdout:
4825 4824 raise error.Abort(_(b'read not available on this peer'))
4826 4825 stdout.read(count)
4827 4826 elif action.startswith(b'eread '):
4828 4827 count = int(action.split(b' ', 1)[1])
4829 4828 if not stderr:
4830 4829 raise error.Abort(_(b'eread not available on this peer'))
4831 4830 stderr.read(count)
4832 4831 else:
4833 4832 raise error.Abort(_(b'unknown action: %s') % action)
4834 4833
4835 4834 if batchedcommands is not None:
4836 4835 raise error.Abort(_(b'unclosed "batchbegin" request'))
4837 4836
4838 4837 if peer:
4839 4838 peer.close()
4840 4839
4841 4840 if proc:
4842 4841 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now