##// END OF EJS Templates
debugrebuildstate: wrap the operation in a `changing_parents` context...
marmoute -
r51008:dae8dda6 default
parent child Browse files
Show More
@@ -1,4776 +1,4776 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import subprocess
25 25 import sys
26 26 import time
27 27
28 28 from .i18n import _
29 29 from .node import (
30 30 bin,
31 31 hex,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 getattr,
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 bundlerepo,
42 42 changegroup,
43 43 cmdutil,
44 44 color,
45 45 context,
46 46 copies,
47 47 dagparser,
48 48 dirstateutils,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 repoview,
73 73 requirements,
74 74 revlog,
75 75 revset,
76 76 revsetlang,
77 77 scmutil,
78 78 setdiscovery,
79 79 simplemerge,
80 80 sshpeer,
81 81 sslutil,
82 82 streamclone,
83 83 strip,
84 84 tags as tagsmod,
85 85 templater,
86 86 treediscovery,
87 87 upgrade,
88 88 url as urlmod,
89 89 util,
90 90 verify,
91 91 vfs as vfsmod,
92 92 wireprotoframing,
93 93 wireprotoserver,
94 94 )
95 95 from .interfaces import repository
96 96 from .utils import (
97 97 cborutil,
98 98 compression,
99 99 dateutil,
100 100 procutil,
101 101 stringutil,
102 102 urlutil,
103 103 )
104 104
105 105 from .revlogutils import (
106 106 constants as revlog_constants,
107 107 debug as revlog_debug,
108 108 deltas as deltautil,
109 109 nodemap,
110 110 rewrite,
111 111 sidedata,
112 112 )
113 113
114 114 release = lockmod.release
115 115
116 116 table = {}
117 117 table.update(strip.command._table)
118 118 command = registrar.command(table)
119 119
120 120
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Three positionals: an explicit index file plus two revisions.
    # Two positionals: resolve against the current repository's changelog.
    if len(args) == 3:
        index, rev1, rev2 = args
        store = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = store.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        store = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = store.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (store.rev(anc), hex(anc)))
140 140
141 141
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Write the (harmless) EICAR test pattern into the repo's cache area;
    # a resident AV engine is expected to react to it.
    # NOTE(review): the path is a native str while the mode is bytes —
    # presumably cachevfs accepts both; confirm against the vfs contract.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    # Remove the test file again once the scan window has passed.
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
157 157
158 158
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle, decode its header, then replay it into the repo.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
165 165
166 166
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
        (
            b'',
            b'from-existing',
            None,
            _(b'continue from a non-empty repository'),
        ),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
    from_existing=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0 and not from_existing:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first of two parse passes; this one
    # only counts nodes so the progress bar below has a total)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [b'%d' % i for i in range(0, total * linesperrev)]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually create the commits, under both locks and one
    # transaction so a failure leaves the repo unchanged.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1
        atbranch = b'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the shared file's
                        # content from both parents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [
                            l.strip()
                            for l in simplemerge.render_minimized(m3)[0]
                        ]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # tag this revision's line so every rev changes the file
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        # carry over the per-rev files from the second parent
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve content for files we created,
                    # None (i.e. removed/absent) for anything else
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag for a previously created node
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                # switch named branch for subsequent commits
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
350 350
351 351
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump a changegroup to the ui.

    With 'all', every delta of changelog, manifest and filelogs is shown;
    otherwise only the changelog node hashes are listed.
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one section (changelog/manifest/one filelog): consumes
            # deltas from 'gen' until the current section is exhausted
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # filelog sections follow until filelogheader() returns the empty
        # dict sentinel (two-argument iter() form)
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
391 391
392 392
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        ui.write(
            b"%sunsupported version: %s (%d bytes)\n"
            % (prefix, exc.version, len(data))
        )
    else:
        ui.write(b"%sversion: %d (%d bytes)\n" % (prefix, version, len(data)))
        fmt = ui.formatter(b'debugobsolete', opts)
        for raw in sorted(markers):
            fmt.startitem()
            fmt.plain(prefix)
            cmdutil.showmarker(fmt, obsutil.marker(None, raw))
        fmt.end()
415 415
416 416
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(b'%s%s %s\n' % (prefix, hex(head), phasename))
425 425
426 426
def _quasirepr(thing):
    """Return a byte-string repr; dict-like values get sorted-key rendering
    so the output is deterministic."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
433 433
434 434
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering
        if parttypes and part.type not in parttypes:
            continue
        ui.write(
            (
                b'%s -- %s (mandatory: %r)\n'
                % (part.type, _quasirepr(part.params), part.mandatory)
            )
        )
        # part types are mutually exclusive, so dispatch with elif
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
457 457
458 458
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        # --spec: just report the bundlespec and stop
        if spec:
            ui.write(b'%s\n' % exchange.getbundlespec(ui, fh))
            return

        bundle = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(bundle, bundle2.unbundle20):
            return _debugbundle2(ui, bundle, all=all, **opts)
        _debugchangegroup(ui, bundle, all=all, **opts)
481 481
482 482
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        # query first, then print, so a failing peer produces no output
        caps = sorted(peer.capabilities())
        ui.writenoi18n(b'Main capabilities:\n')
        for cap in caps:
            ui.write(b'  %s\n' % cap)
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(b2caps.items()):
                ui.write(b'  %s\n' % key)
                for value in values:
                    ui.write(b'    %s\n' % value)
    finally:
        peer.close()
502 502
503 503
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = logcmdutil.revsingle(repo, rev, None)

    # Either recompute the file-change metadata, or decode it from the
    # changelog sidedata when a files block is stored there.
    if opts['compute']:
        files = metadata.compute_all_files_changes(ctx)
    else:
        files = None
        sd = repo.changelog.sidedata(ctx.rev())
        if sd.get(sidedata.SD_FILES) is not None:
            files = metadata.decode_files_sidedata(sd)

    if files is None:
        return

    template = b"%-8s %2s: %s, %s;\n"
    # first matching category wins; plain "touched" is the fallback
    categories = (
        (files.added, b"added"),
        (files.removed, b"removed"),
        (files.merged, b"merged"),
        (files.salvaged, b"salvaged"),
    )
    for f in sorted(files.touched):
        action = b"touched"
        for members, label in categories:
            if f in members:
                action = label
                break

        copy_parent = b""
        copy_source = b""
        if f in files.copied_from_p1:
            copy_parent = b"p1"
            copy_source = files.copied_from_p1[f]
        elif f in files.copied_from_p2:
            copy_parent = b"p2"
            copy_source = files.copied_from_p2[f]

        ui.write(template % (action, copy_parent, f, copy_source))
553 553
554 554
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # any returned error entries mean the dirstate disagrees with the
    # working parent's manifest
    if verify.verifier(repo)._verify_dirstate():
        raise error.Abort(
            _(b"dirstate inconsistent with current parent's manifest")
        )
562 562
563 563
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
576 576
577 577
def _debugdisplaycolor(ui):
    """print every known color label, each rendered in its own style"""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # also expose raw color.* / terminfo.* configuration entries
        for key, value in ui.configitems(b'color'):
            for prefix in (b'color.', b'terminfo.'):
                if key.startswith(prefix):
                    ui._styles[key] = key[len(prefix):]
                    break
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    entries = sorted(
        ui._styles.items(), key=lambda e: (b'_' in e[0], e[0], e[1])
    )
    for colorname, label in entries:
        ui.write(b'%s\n' % colorname, label=label)
594 594
595 595
def _debugdisplaystyle(ui):
    """list configured styles and the effects each one expands to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    longest = max(len(name) for name in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % name, label=name)
        if effects:
            # pad so the effect lists line up in a column
            ui.write(b': ')
            ui.write(b' ' * (max(0, longest - len(name))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
609 609
610 610
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    # 'reqs' (rather than 'requirements') avoids shadowing the module import
    reqs, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(reqs)))
632 632
633 633
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit index file: read it as a standalone revlog
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # yield ('n', (rev, parents)) per node, plus an 'l' label event
            # for each revision listed on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map changelog revision -> list of tag names on it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event when the branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
703 703
704 704
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the sole positional argument is the revision itself.
    if any(opts.get(flag) for flag in (b'changelog', b'manifest', b'dir')):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
720 720
721 721
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
740 740
741 741
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``p1``: parent 1 revision number (for reference)
    :``p2``: parent 2 revision number (for reference)
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    - base: a full snapshot
    - snap: an intermediate snapshot
    - p1: a delta against the first parent
    - p2: a delta against the second parent
    - skip1: a delta against the same base as p1
    (when p1 has empty delta
    - skip2: a delta against the same base as p2
    (when p2 has empty delta
    - prev: a delta against the previous revision
    - other: a delta against an arbitrary revision
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
    (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
    base of delta chain to end of this revision; a measurement
    of how much extra data we need to read/seek across to read
    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
    (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    # security to avoid crash on corrupted revlogs
    total_revs = len(index)

    def revinfo(rev):
        # Gather per-revision delta statistics from the revlog index entry.
        e = index[rev]
        compsize = e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]
        uncompsize = e[revlog_constants.ENTRY_DATA_UNCOMPRESSED_LENGTH]
        chainsize = 0

        base = e[revlog_constants.ENTRY_DELTA_BASE]
        p1 = e[revlog_constants.ENTRY_PARENT_1]
        p2 = e[revlog_constants.ENTRY_PARENT_2]

        # If the parents of a revision has an empty delta, we never try to delta
        # against that parent, but directly against the delta base of that
        # parent (recursively). It avoids adding a useless entry in the chain.
        #
        # However we need to detect that as a special case for delta-type, that
        # is not simply "other".
        p1_base = p1
        if p1 != nullrev and p1 < total_revs:
            e1 = index[p1]
            while e1[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e1[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p1_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p1_base = new_base
                e1 = index[p1_base]
        p2_base = p2
        if p2 != nullrev and p2 < total_revs:
            e2 = index[p2]
            while e2[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH] == 0:
                new_base = e2[revlog_constants.ENTRY_DELTA_BASE]
                if (
                    new_base == p2_base
                    or new_base == nullrev
                    or new_base >= total_revs
                ):
                    break
                p2_base = new_base
                e2 = index[p2_base]

        # classify how this revision's delta was computed (see docstring)
        if generaldelta:
            if base == p1:
                deltatype = b'p1'
            elif base == p2:
                deltatype = b'p2'
            elif base == rev:
                deltatype = b'base'
            elif base == p1_base:
                deltatype = b'skip1'
            elif base == p2_base:
                deltatype = b'skip2'
            elif r.issnapshot(rev):
                deltatype = b'snap'
            elif base == rev - 1:
                deltatype = b'prev'
            else:
                deltatype = b'other'
        else:
            # without generaldelta the base is either the rev itself or prev
            if base == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        # sum the compressed sizes along the whole delta chain
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[revlog_constants.ENTRY_DATA_COMPRESSED_LENGTH]

        return p1, p2, compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev p1 p2 chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # delta-chain bases get sequential ids in first-seen order
    chainbases = {}
    for rev in r:
        p1, p2, comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes spanned on disk from the chain base through this revision
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: no previous revision in the chain
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev p1 p2 chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            p1,
            p2,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of the chain and measure the blocks
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
982 982
983 983
@command(
    b'debug-delta-find',
    cmdutil.debugrevlogopts
    + cmdutil.formatteropts
    + [
        (
            b'',
            b'source',
            b'full',
            _(b'input data feed to the process (full, storage, p1, p2, prev)'),
        ),
    ],
    _(b'-c|-m|FILE REV'),
    optionalrepo=True,
)
def debugdeltafind(ui, repo, arg_1, arg_2=None, source=b'full', **opts):
    """display the computation to get to a valid delta for storing REV

    This command will replay the process used to find the "best" delta to store
    a revision and display information about all the steps used to get to that
    result.

    By default, the process is fed with the full-text for the revision. This
    can be controlled with the --source flag.

    The revision use the revision number of the target storage (not changelog
    revision number).

    note: the process is initiated from a full text of the revision to store.
    """
    opts = pycompat.byteskwargs(opts)
    # One positional: just REV. Two positionals: FILE REV.
    if arg_2 is None:
        file_, rev = None, arg_1
    else:
        file_, rev = arg_1, arg_2
    rev = int(rev)

    store = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    p1r, p2r = store.parentrevs(rev)

    # map each --source keyword to the delta base it implies; lambdas defer
    # the (cheap) lookups until we know the keyword is valid
    base_pickers = {
        b'full': lambda: nullrev,
        b'storage': lambda: store.deltaparent(rev),
        b'p1': lambda: p1r,
        b'p2': lambda: p2r,
        b'prev': lambda: rev - 1,
    }
    picker = base_pickers.get(source)
    if picker is None:
        raise error.InputError(b"invalid --source value: %s" % source)
    base_rev = picker()

    revlog_debug.debug_delta_find(ui, store, rev, base_rev=base_rev)
1041 1041
1042 1042
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (
            b'',
            b'docket',
            False,
            _(b'display the docket (metadata file) instead'),
        ),
        (
            b'',
            b'all',
            False,
            _(b'display dirstate-v2 tree nodes that would not exist in v1'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    if opts.get("docket"):
        # --docket: dump the dirstate-v2 docket (the metadata file) rather
        # than the dirstate entries themselves.
        if not repo.dirstate._use_dirstate_v2:
            raise error.Abort(_(b'dirstate v1 does not have a docket'))

        docket = repo.dirstate._map.docket
        (
            start_offset,
            root_nodes,
            nodes_with_entry,
            nodes_with_copy,
            unused_bytes,
            _unused,
            ignore_pattern,
        ) = dirstateutils.v2.TREE_METADATA.unpack(docket.tree_metadata)

        ui.write(_(b"size of dirstate data: %d\n") % docket.data_size)
        ui.write(_(b"data file uuid: %s\n") % docket.uuid)
        ui.write(_(b"start offset of root nodes: %d\n") % start_offset)
        ui.write(_(b"number of root nodes: %d\n") % root_nodes)
        ui.write(_(b"nodes with entries: %d\n") % nodes_with_entry)
        ui.write(_(b"nodes with copies: %d\n") % nodes_with_copy)
        ui.write(_(b"number of unused bytes: %d\n") % unused_bytes)
        ui.write(
            _(b"ignore pattern hash: %s\n") % binascii.hexlify(ignore_pattern)
        )
        return

    # --nodates is deprecated but still honored; it overrides --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:

        def keyfunc(entry):
            # sort by (mtime, filename) so equal mtimes stay deterministic
            filename, _state, _mode, _size, mtime = entry
            return (mtime, filename)

    else:
        keyfunc = None  # sort by filename
    entries = list(repo.dirstate._map.debug_iter(all=opts['all']))
    entries.sort(key=keyfunc)
    for entry in entries:
        filename, state, mode, size, mtime = entry
        # mtime == -1 marks an entry without a usable cached mtime
        if mtime == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the symlink bit in the stored mode
        if mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (mode & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (state, mode, size, timestr, filename))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1130 1130
1131 1131
@command(
    b'debugdirstateignorepatternshash',
    [],
    _(b''),
)
def debugdirstateignorepatternshash(ui, repo, **opts):
    """show the hash of ignore patterns stored in dirstate if v2,
    or nothing for dirstate-v1
    """
    if repo.dirstate._use_dirstate_v2:
        docket = repo.dirstate._map.docket
        hash_len = 20  # 160 bits for SHA-1
        # the ignore-pattern hash is the trailing field of the tree metadata
        hash_bytes = docket.tree_metadata[-hash_len:]
        ui.write(binascii.hexlify(hash_bytes) + b'\n')
1146 1146
1147 1147
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            b'treat local has having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situations.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If false, random samplings during discovery are deterministic. It is
      meant for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # normal case: talk to an actual remote peer
        path = urlutil.get_unique_pull_path_obj(
            b'debugdiscovery', ui, remoteurl
        )
        branches = (path.branch, [])
        remote = hg.peer(repo, opts, path)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(path.loc))
    else:
        # --remote-as-revs: use the local repository, filtered down to the
        # requested revisions, as the "remote" side
        branches = (None, [])
        remote_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # --local-as-revs: filter the local repository the same way
        local_filtered_revs = logcmdutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    # `data` is filled by the discovery run (audit=data) and later by the
    # statistics computed below
    data = {}
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            # old-style (tree) discovery
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            if remote_revs:
                r = remote._repo.filtered(b'debug-discovery-remote-filter')
                remote._repo = r
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            # new-style (set) discovery
            nodes = None
            if pushedrevs:
                revs = logcmdutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui,
                repo,
                remote,
                ancestorsof=nodes,
                audit=data,
                abortwhenunrelated=False,
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: capture what the discovery prints and
        # expose it as a formatter field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    if len(common) == 1 and repo.nullid in common:
        common = set()
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    # common and missing must partition the repository
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    if b'total-round-trips-heads' in data:
        fm.plain(
            b"  round-trips-heads:    %(total-round-trips-heads)9d\n" % data
        )
    if b'total-round-trips-branches' in data:
        fm.plain(
            b"  round-trips-branches: %(total-round-trips-branches)9d\n"
            % data
        )
    if b'total-round-trips-between' in data:
        fm.plain(
            b"  round-trips-between:  %(total-round-trips-between)9d\n" % data
        )
    fm.plain(b"queries:               %(total-queries)9d\n" % data)
    if b'total-queries-branches' in data:
        fm.plain(b"  queries-branches:    %(total-queries-branches)9d\n" % data)
    if b'total-queries-between' in data:
        fm.plain(b"  queries-between:     %(total-queries-between)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1429 1429
1430 1430
1431 1431 _chunksize = 4 << 10
1432 1432
1433 1433
@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    # open the resource with Mercurial's URL handling (auth, proxies, ...)
    src = urlmod.open(ui, url, output)

    # without --output, write the payload straight to the ui
    if output:
        dest = open(output, b"wb", _chunksize)
    else:
        dest = ui
    try:
        # copy the stream chunk by chunk until exhausted
        while True:
            chunk = src.read(_chunksize)
            if not chunk:
                break
            dest.write(chunk)
    finally:
        # only close what we opened ourselves
        if output:
            dest.close()
1456 1456
1457 1457
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # best-effort location of the extension source on disk
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            # frozen (PyOxidizer) builds have no per-module file
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            # annotate the name with compatibility information
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b'  location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1519 1519
1520 1520
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = logcmdutil.revsingle(repo, opts.get(b'rev'), None)

    # successive processing stages of the fileset expression
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # run the expression through each stage, dumping requested ones
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names to run the matcher against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1616 1616
1617 1617
@command(
    b"debug-repair-issue6528",
    [
        (
            b'',
            b'to-report',
            b'',
            _(b'build a report of affected revisions to this file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'from-report',
            b'',
            _(b'repair revisions listed in this report file'),
            _(b'FILE'),
        ),
        (
            b'',
            b'paranoid',
            False,
            _(b'check that both detection methods do the same thing'),
        ),
    ]
    + cmdutil.dryrunopts,
)
def debug_repair_issue6528(ui, repo, **opts):
    """find affected revisions and repair them. See issue6528 for more details.

    The `--to-report` and `--from-report` flags allow you to cache and reuse the
    computation of affected revisions for a given repository across clones.
    The report format is line-based (with empty lines ignored):

    ```
    <ascii-hex of the affected revision>,... <unencoded filelog index filename>
    ```

    There can be multiple broken revisions per filelog, they are separated by
    a comma with no spaces. The only space is between the revision(s) and the
    filename.

    Note that this does *not* mean that this repairs future affected revisions,
    that needs a separate fix at the exchange level that was introduced in
    Mercurial 5.9.1.

    There is a `--paranoid` flag to test that the fast implementation is correct
    by checking it against the slow implementation. Since this matter is quite
    urgent and testing every edge-case is probably quite costly, we use this
    method to test on large repositories as a fuzzing method of sorts.
    """
    # --to-report only analyzes; it makes no sense combined with repairing
    # from a report or with --dry-run
    cmdutil.check_incompatible_arguments(
        opts, 'to_report', ['from_report', 'dry_run']
    )
    dry_run = opts.get('dry_run')
    to_report = opts.get('to_report')
    from_report = opts.get('from_report')
    paranoid = opts.get('paranoid')
    # TODO maybe add filelog pattern and revision pattern parameters to help
    # narrow down the search for users that know what they're looking for?

    if requirements.REVLOGV1_REQUIREMENT not in repo.requirements:
        msg = b"can only repair revlogv1 repositories, v2 is not affected"
        raise error.Abort(_(msg))

    rewrite.repair_issue6528(
        ui,
        repo,
        dry_run=dry_run,
        to_report=to_report,
        from_report=from_report,
        paranoid=paranoid,
    )
1690 1690
1691 1691
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column, at least as wide as the header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad so that all value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # render strings as-is, booleans as yes/no in plain output
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels highlighting mismatches between repo, config and default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1762 1762
1763 1763
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(value):
        # render a truth value exactly like the historical `and/or` idiom
        return b'yes' if value else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway file; some filesystems (or
    # missing permissions) make this impossible, hence the OSError guard
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1786 1786
1787 1787
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map the user-facing --type value to the on-the-wire bundle header
    bundletype = opts.get(b'type', b'bzip2').lower()
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1834 1834
1835 1835
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    # the file itself matches an ignore rule
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # otherwise check whether a parent directory is ignored
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1884 1884
1885 1885
@command(
    b'debug-revlog-index|debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a revlog"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    fm = ui.formatter(b'debugindex', opts)

    # Some storage objects wrap an underlying revlog in `_revlog`; fall
    # back to the store itself otherwise.  Use a local name distinct from
    # the module-level `revlog` import so we do not shadow it.
    rl = getattr(store, b'_revlog', store)

    return revlog_debug.debug_index(
        ui,
        repo,
        formatter=fm,
        revlog=rl,
        full_node=ui.debugflag,
    )
1907 1907
1908 1908
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # always emit the first-parent edge; the second-parent edge is
        # emitted only for real merges (p2 is not the null node)
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1927 1927
1928 1928
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    # stats() is only provided by the native (compiled) index
    # implementation, hence the capability probe
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for key, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (key, value))
1938 1938
1939 1939
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    """test Mercurial installation

    Runs a series of sanity probes (encoding, Python, compiled modules,
    compression engines, templates, editor, username, extensions) and
    reports each one through a formatter.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)

    # number of failed probes; also the command's return value
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding might be unknown or wrong. don't translate these messages.
    fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        b" %s\n (check that your locale is properly set)\n",
        err,
    )

    # Python
    # locate the Python standard library; under an oxidized (PyOxidizer)
    # build the library lives inside the executable itself
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonimplementation',
        _(b"checking Python implementation (%s)\n"),
        pycompat.sysbytes(platform.python_implementation()),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # probe for the optional Rust extension module
    try:
        from . import rustext  # pytype: disable=import-error

        rustext.__doc__  # trigger lazy import
    except ImportError:
        rustext = None

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    fm.plain(
        _(
            b"checking Rust extensions (%s)\n"
            % (b'missing' if rustext is None else b'installed')
        ),
    )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    # same oxidized-build fallback as for the Python library above
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # which accelerated implementations does the module policy promise?
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc  # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import (  # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import (  # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate)  # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    # `p` doubles as the "templates are healthy" flag: it is cleared when
    # the default map file is missing or fails to parse
    p = templater.templatedir()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        (m, fp) = templater.try_open_template(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # a missing 'vi' default only gets a warning; a missing user-configured
    # editor counts as a problem (see the counter bump below)
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e.message
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # let loaded extensions contribute their own probes
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
2238 2238
2239 2239
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # render each boolean as "1" (known) or "0" (unknown)
    ui.write(b"%s\n" % b"".join(b"1" if f else b"0" for f in flags))
2253 2253
2254 2254
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin alias: all completion logic lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
2259 2259
2260 2260
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-free-*: remove the lock file(s) outright and stop there
    if opts.get('force_free_lock'):
        repo.svfs.tryunlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.tryunlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire without waiting (False) and hold
    # until the user answers the prompt or a signal arrives; the finally
    # clause guarantees release on every exit path
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            try:
                if ui.interactive():
                    prompt = _(b"ready to release the lock (y)? $$ &Yes")
                    ui.promptchoice(prompt)
                else:
                    msg = b"%d locks held, waiting for signal\n"
                    msg %= len(locks)
                    ui.status(msg)
                    while True:  # XXX wait for a signal
                        time.sleep(0.1)
            except KeyboardInterrupt:
                msg = b"signal-received releasing locks\n"
                ui.status(msg)
            return 0
    finally:
        release(*locks)

    # default mode: report the state of both locks
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Probe a single lock; returns 1 if it is held, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody else held it — release immediately
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                # lock contents look like "host:pid"; only show the host
                # when it differs from the local machine
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except FileNotFoundError:
                # lock file vanished between the probe and the stat
                pass

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2383 2383
2384 2384
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # abort with a clear message when the storage backend has no
        # fulltext cache (e.g. non-revlog implementations)
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # no action flags: display the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2458 2458
2459 2459
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    # default template rendering the full merge state; only used when the
    # caller did not supply --template
    if not opts[b'template']:
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # the two sides of the merge (local/other) with optional labels
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # per-file merge records; the fields emitted depend on the record kind
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # extras for files that are not part of the merge state proper
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(ms.allextras().items()):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in d.items():
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2567 2567
2568 2568
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for nsname, ns in repo.names.items():
        if nsname == b'branches':
            continue
        names.update(ns.listnames(repo))
    # open branches only
    names.update(
        tag
        for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
        if not closed
    )
    prefixes = args or [b'']
    completions = set()
    for prefix in prefixes:
        completions.update(n for n in names if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2591 2591
2592 2592
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    # without any mode flag the command is a no-op; bail out before
    # touching the changelog
    if not any(
        opts[k] for k in ('dump_new', 'dump_disk', 'check', 'metadata')
    ):
        return
    # every mode operates on the unfiltered changelog (previously this
    # pair of lines was duplicated in each branch)
    cl = repo.unfiltered().changelog
    if opts['dump_new']:
        # native indexes can serialize themselves; fall back to the pure
        # python writer otherwise
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2654 2654
2655 2655
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full-length hex node id; raises InputError on anything
        # shorter or malformed.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise ValueError
            return n
        except ValueError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by index and stop
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # NOTE: manual lock/transaction management with nested
        # try/finally; the ordering (tr.close before tr.release before
        # l.release) matters
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts[b'rev']:
            revs = logcmdutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2805 2805
2806 2806
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # print one "source -> destination" line per recorded copy
    for dest, source in ctx.p1copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2819 2819
2820 2820
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    # print one "source -> destination" line per recorded copy
    for dest, source in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (source, dest))
2833 2833
2834 2834
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, restricted to
        # dirstate entries whose state byte is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Spec points outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        # Make the spec repo-relative and slash-separated, the form used
        # by dirstate keys.
        spec = spec[len(rootdir) :]
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.items():
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    # Stop at the next separator: offer the directory only.
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate state bytes from the filter
    # options; an empty selection falls back to all of 'nmar' below.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2903 2903
2904 2904
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, then print each copy as "source -> dest"
    # in sorted destination order.
    old_ctx = scmutil.revsingle(repo, rev1)
    new_ctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(old_ctx, pats, opts)
    copy_map = copies.pathcopies(old_ctx, new_ctx, matcher)
    for destination, source in sorted(copy_map.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2918 2918
2919 2919
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    with ui.configoverride({(b'devel', b'debug.peer-request'): True}):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
    finally:
        # Always release the peer connection, even if a query failed.
        peer.close()
2943 2943
2944 2944
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    # --tool maps onto the ui.forcemerge override, same as real merges.
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Silence tool-selection chatter unless --debug is in effect.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
3029 3029
3030 3030
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Write path: conditionally update a single key via the
            # peer's command executor.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            # Exit status 0 on success (truthy result), 1 otherwise.
            return not r
        else:
            # Read path: dump every key/value in the namespace.
            for k, v in sorted(target.listkeys(namespace).items()):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
3066 3066
3067 3067
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors (pvecs) of two revisions and print
    # their relationship: '=' equal, '>' / '<' ancestor-comparable,
    # '|' incomparable.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    # NOTE(review): if none of the four comparisons above holds, `rel`
    # is left unbound and the write below raises NameError — presumably
    # pvec guarantees exactly one of =, >, <, | always holds; confirm.
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
3094 3094
3095 3095
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        # The rebuild rewrites the dirstate directly; refuse to run with
        # an open transaction whose rollback could conflict with that.
        if repo.currenttransaction() is not None:
            msg = b'rebuild the dirstate outside of a transaction'
            raise error.ProgrammingError(msg)
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = {f for f in dsonly if not dirstate.get_entry(f).added}
            changedfiles = manifestonly | dsnotadded

        # Rebuilding resets the recorded parents, so the operation must
        # run inside a changing_parents context, which also takes care
        # of persisting the new dirstate.
        with dirstate.changing_parents(repo):
            dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
3147 3147
3148 3148
@command(
    b'debugrebuildfncache',
    [
        (
            b'',
            b'only-data',
            False,
            _(b'only look for wrong .d files (much faster)'),
        )
    ],
    b'',
)
def debugrebuildfncache(ui, repo, **opts):
    """rebuild the fncache file"""
    # Delegate the actual work to the repair module; --only-data limits
    # the scan to .d files.
    only_data = pycompat.byteskwargs(opts).get(b"only_data")
    repair.rebuildfncache(ui, repo, only_data)
3165 3165
3166 3166
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source filenode) or a falsy value.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(path)
        if not renamed:
            ui.write(_(b"%s not renamed\n") % relpath)
        else:
            srcpath, srcnode = renamed
            ui.write(
                _(b"%s renamed from %s:%s\n") % (relpath, srcpath, hex(srcnode))
            )
3186 3186
3187 3187
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable output.
    lines = [b"%s\n" % req for req in sorted(repo.requirements)]
    ui.write(b''.join(lines))
3193 3193
3194 3194
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    byteopts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, b'debugrevlog', file_, byteopts)

    # Either dump the raw index data (-d) or print the statistics report.
    if byteopts.get(b"dump"):
        revlog_debug.dump(ui, rlog)
    else:
        revlog_debug.debug_revlog(ui, rlog)
    return 0
3211 3211
3212 3212
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # --debug prints full hashes; otherwise abbreviated ones.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Print the column header matching the chosen format and verbosity;
    # node columns are padded to the hash width computed above.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One row per revision, mirroring the header layout above.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the lookup fails.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3326 3326
3327 3327
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The parse/transform pipeline in application order; each stage maps
    # a revset tree to a (possibly rewritten) tree.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Which stages to print: always, or only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and optimized trees and diff the
        # resulting revision lists; any difference is an optimizer bug.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Emit a unified-diff-style comparison of the two revision lists.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3459 3459
3460 3460
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive I/O log sinks.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Serve the wire protocol over the process's stdio handles.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3509 3509
3510 3510
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, and touches
    nothing else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use it if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of those people (most of them
    sailed west from Mithlond anyway).

    So one last time: DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    # Resolve both revisions first; rev2 defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    # Only the recorded parents change; wlock serializes dirstate writes.
    with repo.wlock():
        repo.setparents(node1, node2)
3538 3538
3539 3539
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir, the single positional argument is the revision,
    # not a file path, so shift it over.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # BUGFIX: error messages previously reported the command name
            # as 'debugdata' (copy-paste from that command).
            raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Reach through filelog-style wrappers to the underlying revlog.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Stable output: sort entries by sidedata key.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3566 3566
3567 3567
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    path = urlutil.get_unique_pull_path_obj(b'debugssl', ui, source)
    url = path.url

    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # BUGFIX: ssl.wrap_socket() was deprecated in Python 3.7 and removed
    # in Python 3.12. Build an equivalent unverified client context
    # instead (we only need the peer's certificate, not a verified
    # connection).
    sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS)
    sslcontext.check_hostname = False
    sslcontext.verify_mode = ssl.CERT_NONE
    s = sslcontext.wrap_socket(socket.socket())

    try:
        s.connect(addr)
        # DER-encoded peer certificate, fed to the Windows chain builder.
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3637 3637
3638 3638
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect strip-backup bundles, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to `limit` changesets from `chlist`, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        path = urlutil.get_unique_pull_path_obj(
            b'debugbackupbundle',
            ui,
            source,
        )
        try:
            other = hg.peer(repo, opts, path)
        except error.LookupError as ex:
            msg = _(b"\nwarning: unable to open bundle %s") % path.loc
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        branches = (path.branch, opts.get(b'branch', []))
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Quietly compute the changesets this bundle would add.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                # Apply the first bundle containing the requested node,
                # then stop scanning.
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, path.loc)
                        gen = exchange.readbundle(ui, f, path.loc)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + path.loc,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + path.loc)
                        break
            else:
                # Listing mode: header with the backup's mtime, then the
                # changesets it contains.
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(path.loc)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), path.loc))
                else:
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3778 3778
3779 3779
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Resolve the requested (or working-directory parent) changeset and dump
    # its subrepo state, one entry per path, in deterministic sorted order.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3791 3791
3792 3792
@command(
    b'debugshell',
    [
        (
            b'c',
            b'command',
            b'',
            _(b'program passed in as a string'),
            _(b'COMMAND'),
        )
    ],
    _(b'[-c COMMAND]'),
    optionalrepo=True,
)
def debugshell(ui, repo, **opts):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Names exposed to the interactive session / evaluated command string.
    local_namespace = {
        'ui': ui,
        'repo': repo,
    }

    # py2exe disables initialization of the site module, which is responsible
    # for arranging for ``quit()`` to exit the interpreter. Manually initialize
    # the stuff that site normally does here, so that the interpreter can be
    # quit in a consistent manner, whether run with pyoxidizer, exewrapper.c,
    # py.exe, or py2exe.
    if getattr(sys, "frozen", None) == 'console_exe':
        try:
            import site

            site.setcopyright()
            site.sethelper()
            site.setquit()
        except ImportError:
            site = None  # Keep PyCharm happy

    command = opts.get('command')
    if not command:
        # No -c given: drop into an interactive prompt.
        code.interact(local=local_namespace)
        return

    compiled = code.compile_command(encoding.strfromlocal(command))
    code.InteractiveInterpreter(locals=local_namespace).runcode(compiled)
3842 3842
3843 3843
@command(
    b'debug-revlog-stats',
    [
        (b'c', b'changelog', None, _(b'Display changelog statistics')),
        (b'm', b'manifest', None, _(b'Display manifest statistics')),
        (b'f', b'filelogs', None, _(b'Display filelogs statistics')),
    ]
    + cmdutil.formatteropts,
)
def debug_revlog_stats(ui, repo, **opts):
    """display statistics about revlogs in the store"""
    opts = pycompat.byteskwargs(opts)
    changelog = opts[b"changelog"]
    manifest = opts[b"manifest"]
    filelogs = opts[b"filelogs"]

    # No explicit selection means "report on everything".
    if changelog is None and manifest is None and filelogs is None:
        changelog = manifest = filelogs = True

    # Use the unfiltered repo so hidden revisions are included in the stats.
    repo = repo.unfiltered()
    fm = ui.formatter(b'debug-revlog-stats', opts)
    revlog_debug.debug_revlog_stats(repo, fm, changelog, manifest, filelogs)
    fm.end()
3869 3869
3870 3870
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Shared across successorssets() calls so computation from one revision
    # can be reused for the next.
    cache = {}
    for rev in logcmdutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                # One leading space, then the short nodes space-separated.
                ui.write(b' %s' % b' '.join(short(n) for n in succsset))
            ui.write(b'\n')
3925 3925
3926 3926
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        fnode = cache.getfnode(node, computemissing=False)
        if fnode:
            display = hex(fnode)
            if not flog.hasnode(fnode):
                # Cached fnode does not exist in the .hgtags filelog.
                display += b' (unknown node)'
        elif fnode is None:
            display = b'missing'
        else:
            # Falsy but not None — presumably a corrupt/invalid cache entry.
            display = b'invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), display))
3945 3945
3946 3946
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # The command is optionalrepo, but --rev cannot work without one.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = logcmdutil.revrange(repo, opts['rev'])

    # Collect -D KEY=VALUE options as extra template properties.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            # An empty key, or the key b'ui', is rejected (b'ui' presumably
            # to avoid clashing with the engine-provided resource — confirm).
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            # Also reached when d has no b'=' (split yields a single item).
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree when expansion
        # actually changed something.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render exactly once with the given properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4010 4010
4011 4011
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # getpass() may yield None; substitute a marker so output stays readable.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4026 4026
4027 4027
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo the answer back so tests can observe what the prompt returned.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4040 4040
4041 4041
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take both the working-copy lock and the store lock before rebuilding
    # every cache (CACHES_ALL).
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4047 4047
4048 4048
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # All real work is delegated to the upgrade module; deduplicate the
    # requested optimizations first.
    optimizations = set(optimize)
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=optimizations, backup=backup, **opts
    )
4098 4098
4099 4099
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # When ui.slash is set on a platform whose separator is not '/', show
    # normalized (forward-slash) relative paths.
    normalize = ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'
    # Column widths are sized to the longest repo-absolute and relative paths.
    fmt = b'f %%-%ds %%-%ds %%s' % (
        max(len(fname) for fname in items),
        max(len(repo.pathto(fname)) for fname in items),
    )
    for fname in items:
        relpath = repo.pathto(fname)
        if normalize:
            relpath = util.normpath(relpath)
        line = fmt % (
            fname,
            relpath,
            b'exact' if m.exact(fname) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4126 4126
4127 4127
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    target = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, target):
        divergent = entry.get(b'divergentnodes')
        if divergent:
            # Render "hex (phase)" for each divergent node, trailing space
            # included so it separates cleanly from the reason.
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (c.hex(), c.phasestr()) for c in divergent
                )
                + b' '
            )
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4145 4145
4146 4146
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    try:
        # The remote options were consumed by hg.peer(); strip them so only
        # the command's own arguments are forwarded.
        for remoteopt in cmdutil.remoteopts:
            del opts[remoteopt[1]]
        # Forward only options that were actually set (non-empty values).
        args = pycompat.strkwargs({k: v for k, v in opts.items() if v})
        # run twice to check that we don't mess up the stream for the next command
        first = peer.debugwireargs(*vals, **args)
        second = peer.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % first)
        if first != second:
            ui.warn(b"%s\n" % second)
    finally:
        peer.close()
4177 4177
4178 4178
def _parsewirelangblocks(fh):
    """Yield ``(action, lines)`` blocks from a wire-language stream.

    A block starts with a non-indented line (the action) and collects the
    indented lines that follow it. Blank lines and ``#`` comments are
    skipped. An indented line that is *more* indented than its predecessor
    is treated as a continuation and concatenated onto the previous line.
    """
    current = None  # action line of the block being accumulated
    body = []  # indented lines collected for ``current``
    previndent = 0  # indent width of the previous body line

    for raw in fh:
        raw = raw.rstrip()
        if not raw:
            continue
        if raw.startswith(b'#'):
            continue

        if not raw.startswith(b' '):
            # A new block begins; emit the one in progress, if any.
            if current:
                yield current, body
            current = raw
            body = []
            previndent = 0
            continue

        # From here on the line is indented, so it must belong to a block.
        if not current:
            raise error.Abort(_(b'indented line outside of block'))

        depth = len(raw) - len(raw.lstrip())
        if depth > previndent and body:
            # Deeper indent: continuation of the previous body line.
            body[-1] += raw.lstrip()
        else:
            body.append(raw)
        previndent = depth

    # Emit the trailing block, if any.
    if current:
        yield current, body
4219 4219
4220 4220
4221 4221 @command(
4222 4222 b'debugwireproto',
4223 4223 [
4224 4224 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4225 4225 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4226 4226 (
4227 4227 b'',
4228 4228 b'noreadstderr',
4229 4229 False,
4230 4230 _(b'do not read from stderr of the remote'),
4231 4231 ),
4232 4232 (
4233 4233 b'',
4234 4234 b'nologhandshake',
4235 4235 False,
4236 4236 _(b'do not log I/O related to the peer handshake'),
4237 4237 ),
4238 4238 ]
4239 4239 + cmdutil.remoteopts,
4240 4240 _(b'[PATH]'),
4241 4241 optionalrepo=True,
4242 4242 )
4243 4243 def debugwireproto(ui, repo, path=None, **opts):
4244 4244 """send wire protocol commands to a server
4245 4245
4246 4246 This command can be used to issue wire protocol commands to remote
4247 4247 peers and to debug the raw data being exchanged.
4248 4248
4249 4249 ``--localssh`` will start an SSH server against the current repository
4250 4250 and connect to that. By default, the connection will perform a handshake
4251 4251 and establish an appropriate peer instance.
4252 4252
4253 4253 ``--peer`` can be used to bypass the handshake protocol and construct a
4254 4254 peer instance using the specified class type. Valid values are ``raw``,
4255 4255 ``ssh1``. ``raw`` instances only allow sending raw data payloads and
4256 4256 don't support higher-level command actions.
4257 4257
4258 4258 ``--noreadstderr`` can be used to disable automatic reading from stderr
4259 4259 of the peer (for SSH connections only). Disabling automatic reading of
4260 4260 stderr is useful for making output more deterministic.
4261 4261
4262 4262 Commands are issued via a mini language which is specified via stdin.
4263 4263 The language consists of individual actions to perform. An action is
4264 4264 defined by a block. A block is defined as a line with no leading
4265 4265 space followed by 0 or more lines with leading space. Blocks are
4266 4266 effectively a high-level command with additional metadata.
4267 4267
4268 4268 Lines beginning with ``#`` are ignored.
4269 4269
4270 4270 The following sections denote available actions.
4271 4271
4272 4272 raw
4273 4273 ---
4274 4274
4275 4275 Send raw data to the server.
4276 4276
4277 4277 The block payload contains the raw data to send as one atomic send
4278 4278 operation. The data may not actually be delivered in a single system
4279 4279 call: it depends on the abilities of the transport being used.
4280 4280
4281 4281 Each line in the block is de-indented and concatenated. Then, that
4282 4282 value is evaluated as a Python b'' literal. This allows the use of
4283 4283 backslash escaping, etc.
4284 4284
4285 4285 raw+
4286 4286 ----
4287 4287
4288 4288 Behaves like ``raw`` except flushes output afterwards.
4289 4289
4290 4290 command <X>
4291 4291 -----------
4292 4292
4293 4293 Send a request to run a named command, whose name follows the ``command``
4294 4294 string.
4295 4295
4296 4296 Arguments to the command are defined as lines in this block. The format of
4297 4297 each line is ``<key> <value>``. e.g.::
4298 4298
4299 4299 command listkeys
4300 4300 namespace bookmarks
4301 4301
4302 4302 If the value begins with ``eval:``, it will be interpreted as a Python
4303 4303 literal expression. Otherwise values are interpreted as Python b'' literals.
4304 4304 This allows sending complex types and encoding special byte sequences via
4305 4305 backslash escaping.
4306 4306
4307 4307 The following arguments have special meaning:
4308 4308
4309 4309 ``PUSHFILE``
4310 4310 When defined, the *push* mechanism of the peer will be used instead
4311 4311 of the static request-response mechanism and the content of the
4312 4312 file specified in the value of this argument will be sent as the
4313 4313 command payload.
4314 4314
4315 4315 This can be used to submit a local bundle file to the remote.
4316 4316
4317 4317 batchbegin
4318 4318 ----------
4319 4319
4320 4320 Instruct the peer to begin a batched send.
4321 4321
4322 4322 All ``command`` blocks are queued for execution until the next
4323 4323 ``batchsubmit`` block.
4324 4324
4325 4325 batchsubmit
4326 4326 -----------
4327 4327
4328 4328 Submit previously queued ``command`` blocks as a batch request.
4329 4329
4330 4330 This action MUST be paired with a ``batchbegin`` action.
4331 4331
4332 4332 httprequest <method> <path>
4333 4333 ---------------------------
4334 4334
4335 4335 (HTTP peer only)
4336 4336
4337 4337 Send an HTTP request to the peer.
4338 4338
4339 4339 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4340 4340
4341 4341 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4342 4342 headers to add to the request. e.g. ``Accept: foo``.
4343 4343
4344 4344 The following arguments are special:
4345 4345
4346 4346 ``BODYFILE``
4347 4347 The content of the file defined as the value to this argument will be
4348 4348 transferred verbatim as the HTTP request body.
4349 4349
4350 4350 ``frame <type> <flags> <payload>``
4351 4351 Send a unified protocol frame as part of the request body.
4352 4352
4353 4353 All frames will be collected and sent as the body to the HTTP
4354 4354 request.
4355 4355
4356 4356 close
4357 4357 -----
4358 4358
4359 4359 Close the connection to the server.
4360 4360
4361 4361 flush
4362 4362 -----
4363 4363
4364 4364 Flush data written to the server.
4365 4365
4366 4366 readavailable
4367 4367 -------------
4368 4368
4369 4369 Close the write end of the connection and read all available data from
4370 4370 the server.
4371 4371
4372 4372 If the connection to the server encompasses multiple pipes, we poll both
4373 4373 pipes and read available data.
4374 4374
4375 4375 readline
4376 4376 --------
4377 4377
4378 4378 Read a line of output from the server. If there are multiple output
4379 4379 pipes, reads only the main pipe.
4380 4380
4381 4381 ereadline
4382 4382 ---------
4383 4383
4384 4384 Like ``readline``, but read from the stderr pipe, if available.
4385 4385
4386 4386 read <X>
4387 4387 --------
4388 4388
4389 4389 ``read()`` N bytes from the server's main output pipe.
4390 4390
4391 4391 eread <X>
4392 4392 ---------
4393 4393
4394 4394 ``read()`` N bytes from the server's stderr pipe, if available.
4395 4395
4396 4396 Specifying Unified Frame-Based Protocol Frames
4397 4397 ----------------------------------------------
4398 4398
4399 4399 It is possible to emit a *Unified Frame-Based Protocol* by using special
4400 4400 syntax.
4401 4401
4402 4402 A frame is composed as a type, flags, and payload. These can be parsed
4403 4403 from a string of the form:
4404 4404
4405 4405 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4406 4406
4407 4407 ``request-id`` and ``stream-id`` are integers defining the request and
4408 4408 stream identifiers.
4409 4409
4410 4410 ``type`` can be an integer value for the frame type or the string name
4411 4411 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4412 4412 ``command-name``.
4413 4413
4414 4414 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4415 4415 components. Each component (and there can be just one) can be an integer
4416 4416 or a flag name for stream flags or frame flags, respectively. Values are
4417 4417 resolved to integers and then bitwise OR'd together.
4418 4418
4419 4419 ``payload`` represents the raw frame payload. If it begins with
4420 4420 ``cbor:``, the following string is evaluated as Python code and the
4421 4421 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4422 4422 as a Python byte string literal.
4423 4423 """
4424 4424 opts = pycompat.byteskwargs(opts)
4425 4425
4426 4426 if opts[b'localssh'] and not repo:
4427 4427 raise error.Abort(_(b'--localssh requires a repository'))
4428 4428
4429 4429 if opts[b'peer'] and opts[b'peer'] not in (
4430 4430 b'raw',
4431 4431 b'ssh1',
4432 4432 ):
4433 4433 raise error.Abort(
4434 4434 _(b'invalid value for --peer'),
4435 4435 hint=_(b'valid values are "raw" and "ssh1"'),
4436 4436 )
4437 4437
4438 4438 if path and opts[b'localssh']:
4439 4439 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4440 4440
4441 4441 if ui.interactive():
4442 4442 ui.write(_(b'(waiting for commands on stdin)\n'))
4443 4443
4444 4444 blocks = list(_parsewirelangblocks(ui.fin))
4445 4445
4446 4446 proc = None
4447 4447 stdin = None
4448 4448 stdout = None
4449 4449 stderr = None
4450 4450 opener = None
4451 4451
4452 4452 if opts[b'localssh']:
4453 4453 # We start the SSH server in its own process so there is process
4454 4454 # separation. This prevents a whole class of potential bugs around
4455 4455 # shared state from interfering with server operation.
4456 4456 args = procutil.hgcmd() + [
4457 4457 b'-R',
4458 4458 repo.root,
4459 4459 b'debugserve',
4460 4460 b'--sshstdio',
4461 4461 ]
4462 4462 proc = subprocess.Popen(
4463 4463 pycompat.rapply(procutil.tonativestr, args),
4464 4464 stdin=subprocess.PIPE,
4465 4465 stdout=subprocess.PIPE,
4466 4466 stderr=subprocess.PIPE,
4467 4467 bufsize=0,
4468 4468 )
4469 4469
4470 4470 stdin = proc.stdin
4471 4471 stdout = proc.stdout
4472 4472 stderr = proc.stderr
4473 4473
4474 4474 # We turn the pipes into observers so we can log I/O.
4475 4475 if ui.verbose or opts[b'peer'] == b'raw':
4476 4476 stdin = util.makeloggingfileobject(
4477 4477 ui, proc.stdin, b'i', logdata=True
4478 4478 )
4479 4479 stdout = util.makeloggingfileobject(
4480 4480 ui, proc.stdout, b'o', logdata=True
4481 4481 )
4482 4482 stderr = util.makeloggingfileobject(
4483 4483 ui, proc.stderr, b'e', logdata=True
4484 4484 )
4485 4485
4486 4486 # --localssh also implies the peer connection settings.
4487 4487
4488 4488 url = b'ssh://localserver'
4489 4489 autoreadstderr = not opts[b'noreadstderr']
4490 4490
4491 4491 if opts[b'peer'] == b'ssh1':
4492 4492 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4493 4493 peer = sshpeer.sshv1peer(
4494 4494 ui,
4495 4495 url,
4496 4496 proc,
4497 4497 stdin,
4498 4498 stdout,
4499 4499 stderr,
4500 4500 None,
4501 4501 autoreadstderr=autoreadstderr,
4502 4502 )
4503 4503 elif opts[b'peer'] == b'raw':
4504 4504 ui.write(_(b'using raw connection to peer\n'))
4505 4505 peer = None
4506 4506 else:
4507 4507 ui.write(_(b'creating ssh peer from handshake results\n'))
4508 4508 peer = sshpeer.makepeer(
4509 4509 ui,
4510 4510 url,
4511 4511 proc,
4512 4512 stdin,
4513 4513 stdout,
4514 4514 stderr,
4515 4515 autoreadstderr=autoreadstderr,
4516 4516 )
4517 4517
4518 4518 elif path:
4519 4519 # We bypass hg.peer() so we can proxy the sockets.
4520 4520 # TODO consider not doing this because we skip
4521 4521 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4522 4522 u = urlutil.url(path)
4523 4523 if u.scheme != b'http':
4524 4524 raise error.Abort(_(b'only http:// paths are currently supported'))
4525 4525
4526 4526 url, authinfo = u.authinfo()
4527 4527 openerargs = {
4528 4528 'useragent': b'Mercurial debugwireproto',
4529 4529 }
4530 4530
4531 4531 # Turn pipes/sockets into observers so we can log I/O.
4532 4532 if ui.verbose:
4533 4533 openerargs.update(
4534 4534 {
4535 4535 'loggingfh': ui,
4536 4536 'loggingname': b's',
4537 4537 'loggingopts': {
4538 4538 'logdata': True,
4539 4539 'logdataapis': False,
4540 4540 },
4541 4541 }
4542 4542 )
4543 4543
4544 4544 if ui.debugflag:
4545 4545 openerargs['loggingopts']['logdataapis'] = True
4546 4546
4547 4547 # Don't send default headers when in raw mode. This allows us to
4548 4548 # bypass most of the behavior of our URL handling code so we can
4549 4549 # have near complete control over what's sent on the wire.
4550 4550 if opts[b'peer'] == b'raw':
4551 4551 openerargs['sendaccept'] = False
4552 4552
4553 4553 opener = urlmod.opener(ui, authinfo, **openerargs)
4554 4554
4555 4555 if opts[b'peer'] == b'raw':
4556 4556 ui.write(_(b'using raw connection to peer\n'))
4557 4557 peer = None
4558 4558 elif opts[b'peer']:
4559 4559 raise error.Abort(
4560 4560 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4561 4561 )
4562 4562 else:
4563 4563 peer_path = urlutil.try_path(ui, path)
4564 4564 peer = httppeer.makepeer(ui, peer_path, opener=opener)
4565 4565
4566 4566 # We /could/ populate stdin/stdout with sock.makefile()...
4567 4567 else:
4568 4568 raise error.Abort(_(b'unsupported connection configuration'))
4569 4569
4570 4570 batchedcommands = None
4571 4571
4572 4572 # Now perform actions based on the parsed wire language instructions.
4573 4573 for action, lines in blocks:
4574 4574 if action in (b'raw', b'raw+'):
4575 4575 if not stdin:
4576 4576 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4577 4577
4578 4578 # Concatenate the data together.
4579 4579 data = b''.join(l.lstrip() for l in lines)
4580 4580 data = stringutil.unescapestr(data)
4581 4581 stdin.write(data)
4582 4582
4583 4583 if action == b'raw+':
4584 4584 stdin.flush()
4585 4585 elif action == b'flush':
4586 4586 if not stdin:
4587 4587 raise error.Abort(_(b'cannot call flush on this peer'))
4588 4588 stdin.flush()
4589 4589 elif action.startswith(b'command'):
4590 4590 if not peer:
4591 4591 raise error.Abort(
4592 4592 _(
4593 4593 b'cannot send commands unless peer instance '
4594 4594 b'is available'
4595 4595 )
4596 4596 )
4597 4597
4598 4598 command = action.split(b' ', 1)[1]
4599 4599
4600 4600 args = {}
4601 4601 for line in lines:
4602 4602 # We need to allow empty values.
4603 4603 fields = line.lstrip().split(b' ', 1)
4604 4604 if len(fields) == 1:
4605 4605 key = fields[0]
4606 4606 value = b''
4607 4607 else:
4608 4608 key, value = fields
4609 4609
4610 4610 if value.startswith(b'eval:'):
4611 4611 value = stringutil.evalpythonliteral(value[5:])
4612 4612 else:
4613 4613 value = stringutil.unescapestr(value)
4614 4614
4615 4615 args[key] = value
4616 4616
4617 4617 if batchedcommands is not None:
4618 4618 batchedcommands.append((command, args))
4619 4619 continue
4620 4620
4621 4621 ui.status(_(b'sending %s command\n') % command)
4622 4622
4623 4623 if b'PUSHFILE' in args:
4624 4624 with open(args[b'PUSHFILE'], 'rb') as fh:
4625 4625 del args[b'PUSHFILE']
4626 4626 res, output = peer._callpush(
4627 4627 command, fh, **pycompat.strkwargs(args)
4628 4628 )
4629 4629 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4630 4630 ui.status(
4631 4631 _(b'remote output: %s\n') % stringutil.escapestr(output)
4632 4632 )
4633 4633 else:
4634 4634 with peer.commandexecutor() as e:
4635 4635 res = e.callcommand(command, args).result()
4636 4636
4637 4637 ui.status(
4638 4638 _(b'response: %s\n')
4639 4639 % stringutil.pprint(res, bprefix=True, indent=2)
4640 4640 )
4641 4641
4642 4642 elif action == b'batchbegin':
4643 4643 if batchedcommands is not None:
4644 4644 raise error.Abort(_(b'nested batchbegin not allowed'))
4645 4645
4646 4646 batchedcommands = []
4647 4647 elif action == b'batchsubmit':
4648 4648 # There is a batching API we could go through. But it would be
4649 4649 # difficult to normalize requests into function calls. It is easier
4650 4650 # to bypass this layer and normalize to commands + args.
4651 4651 ui.status(
4652 4652 _(b'sending batch with %d sub-commands\n')
4653 4653 % len(batchedcommands)
4654 4654 )
4655 4655 assert peer is not None
4656 4656 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4657 4657 ui.status(
4658 4658 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4659 4659 )
4660 4660
4661 4661 batchedcommands = None
4662 4662
4663 4663 elif action.startswith(b'httprequest '):
4664 4664 if not opener:
4665 4665 raise error.Abort(
4666 4666 _(b'cannot use httprequest without an HTTP peer')
4667 4667 )
4668 4668
4669 4669 request = action.split(b' ', 2)
4670 4670 if len(request) != 3:
4671 4671 raise error.Abort(
4672 4672 _(
4673 4673 b'invalid httprequest: expected format is '
4674 4674 b'"httprequest <method> <path>'
4675 4675 )
4676 4676 )
4677 4677
4678 4678 method, httppath = request[1:]
4679 4679 headers = {}
4680 4680 body = None
4681 4681 frames = []
4682 4682 for line in lines:
4683 4683 line = line.lstrip()
4684 4684 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4685 4685 if m:
4686 4686 # Headers need to use native strings.
4687 4687 key = pycompat.strurl(m.group(1))
4688 4688 value = pycompat.strurl(m.group(2))
4689 4689 headers[key] = value
4690 4690 continue
4691 4691
4692 4692 if line.startswith(b'BODYFILE '):
4693 4693 with open(line.split(b' ', 1), b'rb') as fh:
4694 4694 body = fh.read()
4695 4695 elif line.startswith(b'frame '):
4696 4696 frame = wireprotoframing.makeframefromhumanstring(
4697 4697 line[len(b'frame ') :]
4698 4698 )
4699 4699
4700 4700 frames.append(frame)
4701 4701 else:
4702 4702 raise error.Abort(
4703 4703 _(b'unknown argument to httprequest: %s') % line
4704 4704 )
4705 4705
4706 4706 url = path + httppath
4707 4707
4708 4708 if frames:
4709 4709 body = b''.join(bytes(f) for f in frames)
4710 4710
4711 4711 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4712 4712
4713 4713 # urllib.Request insists on using has_data() as a proxy for
4714 4714 # determining the request method. Override that to use our
4715 4715 # explicitly requested method.
4716 4716 req.get_method = lambda: pycompat.sysstr(method)
4717 4717
4718 4718 try:
4719 4719 res = opener.open(req)
4720 4720 body = res.read()
4721 4721 except util.urlerr.urlerror as e:
4722 4722 # read() method must be called, but only exists in Python 2
4723 4723 getattr(e, 'read', lambda: None)()
4724 4724 continue
4725 4725
4726 4726 ct = res.headers.get('Content-Type')
4727 4727 if ct == 'application/mercurial-cbor':
4728 4728 ui.write(
4729 4729 _(b'cbor> %s\n')
4730 4730 % stringutil.pprint(
4731 4731 cborutil.decodeall(body), bprefix=True, indent=2
4732 4732 )
4733 4733 )
4734 4734
4735 4735 elif action == b'close':
4736 4736 assert peer is not None
4737 4737 peer.close()
4738 4738 elif action == b'readavailable':
4739 4739 if not stdout or not stderr:
4740 4740 raise error.Abort(
4741 4741 _(b'readavailable not available on this peer')
4742 4742 )
4743 4743
4744 4744 stdin.close()
4745 4745 stdout.read()
4746 4746 stderr.read()
4747 4747
4748 4748 elif action == b'readline':
4749 4749 if not stdout:
4750 4750 raise error.Abort(_(b'readline not available on this peer'))
4751 4751 stdout.readline()
4752 4752 elif action == b'ereadline':
4753 4753 if not stderr:
4754 4754 raise error.Abort(_(b'ereadline not available on this peer'))
4755 4755 stderr.readline()
4756 4756 elif action.startswith(b'read '):
4757 4757 count = int(action.split(b' ', 1)[1])
4758 4758 if not stdout:
4759 4759 raise error.Abort(_(b'read not available on this peer'))
4760 4760 stdout.read(count)
4761 4761 elif action.startswith(b'eread '):
4762 4762 count = int(action.split(b' ', 1)[1])
4763 4763 if not stderr:
4764 4764 raise error.Abort(_(b'eread not available on this peer'))
4765 4765 stderr.read(count)
4766 4766 else:
4767 4767 raise error.Abort(_(b'unknown action: %s') % action)
4768 4768
4769 4769 if batchedcommands is not None:
4770 4770 raise error.Abort(_(b'unclosed "batchbegin" request'))
4771 4771
4772 4772 if peer:
4773 4773 peer.close()
4774 4774
4775 4775 if proc:
4776 4776 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now