debugdiscovery: add missing byte string marker to some help text...
marmoute
r47504:b6ac6124 default
@@ -1,4779 +1,4779 @@
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullid,
34 34 nullrev,
35 35 short,
36 36 )
37 37 from .pycompat import (
38 38 getattr,
39 39 open,
40 40 )
41 41 from . import (
42 42 bundle2,
43 43 bundlerepo,
44 44 changegroup,
45 45 cmdutil,
46 46 color,
47 47 context,
48 48 copies,
49 49 dagparser,
50 50 encoding,
51 51 error,
52 52 exchange,
53 53 extensions,
54 54 filemerge,
55 55 filesetlang,
56 56 formatter,
57 57 hg,
58 58 httppeer,
59 59 localrepo,
60 60 lock as lockmod,
61 61 logcmdutil,
62 62 mergestate as mergestatemod,
63 63 metadata,
64 64 obsolete,
65 65 obsutil,
66 66 pathutil,
67 67 phases,
68 68 policy,
69 69 pvec,
70 70 pycompat,
71 71 registrar,
72 72 repair,
73 73 repoview,
74 74 revlog,
75 75 revset,
76 76 revsetlang,
77 77 scmutil,
78 78 setdiscovery,
79 79 simplemerge,
80 80 sshpeer,
81 81 sslutil,
82 82 streamclone,
83 83 strip,
84 84 tags as tagsmod,
85 85 templater,
86 86 treediscovery,
87 87 upgrade,
88 88 url as urlmod,
89 89 util,
90 90 vfs as vfsmod,
91 91 wireprotoframing,
92 92 wireprotoserver,
93 93 wireprotov2peer,
94 94 )
95 95 from .utils import (
96 96 cborutil,
97 97 compression,
98 98 dateutil,
99 99 procutil,
100 100 stringutil,
101 101 )
102 102
103 103 from .revlogutils import (
104 104 deltas as deltautil,
105 105 nodemap,
106 106 sidedata,
107 107 )
108 108
109 109 release = lockmod.release
110 110
111 111 table = {}
112 112 table.update(strip.command._table)
113 113 command = registrar.command(table)
114 114
115 115
116 116 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
117 117 def debugancestor(ui, repo, *args):
118 118 """find the ancestor revision of two revisions in a given index"""
119 119 if len(args) == 3:
120 120 index, rev1, rev2 = args
121 121 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
122 122 lookup = r.lookup
123 123 elif len(args) == 2:
124 124 if not repo:
125 125 raise error.Abort(
126 126 _(b'there is no Mercurial repository here (.hg not found)')
127 127 )
128 128 rev1, rev2 = args
129 129 r = repo.changelog
130 130 lookup = repo.lookup
131 131 else:
132 132 raise error.Abort(_(b'either two or three arguments required'))
133 133 a = r.ancestor(lookup(rev1), lookup(rev2))
134 134 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
135 135
136 136
137 137 @command(b'debugantivirusrunning', [])
138 138 def debugantivirusrunning(ui, repo):
139 139 """attempt to trigger an antivirus scanner to see if one is active"""
140 140 with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
141 141 f.write(
142 142 util.b85decode(
143 143 # This is a base85-armored version of the EICAR test file. See
144 144 # https://en.wikipedia.org/wiki/EICAR_test_file for details.
145 145 b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
146 146 b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
147 147 )
148 148 )
149 149 # Give an AV engine time to scan the file.
150 150 time.sleep(2)
151 151 util.unlink(repo.cachevfs.join('eicar-test-file.com'))
152 152
153 153
154 154 @command(b'debugapplystreamclonebundle', [], b'FILE')
155 155 def debugapplystreamclonebundle(ui, repo, fname):
156 156 """apply a stream clone bundle file"""
157 157 f = hg.openpath(ui, fname)
158 158 gen = exchange.readbundle(ui, f, fname)
159 159 gen.apply(repo)
160 160
161 161
162 162 @command(
163 163 b'debugbuilddag',
164 164 [
165 165 (
166 166 b'm',
167 167 b'mergeable-file',
168 168 None,
169 169 _(b'add single file mergeable changes'),
170 170 ),
171 171 (
172 172 b'o',
173 173 b'overwritten-file',
174 174 None,
175 175 _(b'add single file all revs overwrite'),
176 176 ),
177 177 (b'n', b'new-file', None, _(b'add new file at each rev')),
178 178 ],
179 179 _(b'[OPTION]... [TEXT]'),
180 180 )
181 181 def debugbuilddag(
182 182 ui,
183 183 repo,
184 184 text=None,
185 185 mergeable_file=False,
186 186 overwritten_file=False,
187 187 new_file=False,
188 188 ):
189 189 """builds a repo with a given DAG from scratch in the current empty repo
190 190
191 191 The description of the DAG is read from stdin if not given on the
192 192 command line.
193 193
194 194 Elements:
195 195
196 196 - "+n" is a linear run of n nodes based on the current default parent
197 197 - "." is a single node based on the current default parent
198 198 - "$" resets the default parent to null (implied at the start);
199 199 otherwise the default parent is always the last node created
200 200 - "<p" sets the default parent to the backref p
201 201 - "*p" is a fork at parent p, which is a backref
202 202 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
203 203 - "/p2" is a merge of the preceding node and p2
204 204 - ":tag" defines a local tag for the preceding node
205 205 - "@branch" sets the named branch for subsequent nodes
206 206 - "#...\\n" is a comment up to the end of the line
207 207
208 208 Whitespace between the above elements is ignored.
209 209
210 210 A backref is either
211 211
212 212 - a number n, which references the node curr-n, where curr is the current
213 213 node, or
214 214 - the name of a local tag you placed earlier using ":tag", or
215 215 - empty to denote the default parent.
216 216
217 217 All string-valued elements are either strictly alphanumeric, or must
218 218 be enclosed in double quotes ("..."), with "\\" as escape character.
219 219 """
220 220
221 221 if text is None:
222 222 ui.status(_(b"reading DAG from stdin\n"))
223 223 text = ui.fin.read()
224 224
225 225 cl = repo.changelog
226 226 if len(cl) > 0:
227 227 raise error.Abort(_(b'repository is not empty'))
228 228
229 229 # determine number of revs in DAG
230 230 total = 0
231 231 for type, data in dagparser.parsedag(text):
232 232 if type == b'n':
233 233 total += 1
234 234
235 235 if mergeable_file:
236 236 linesperrev = 2
237 237 # make a file with k lines per rev
238 238 initialmergedlines = [
239 239 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
240 240 ]
241 241 initialmergedlines.append(b"")
242 242
243 243 tags = []
244 244 progress = ui.makeprogress(
245 245 _(b'building'), unit=_(b'revisions'), total=total
246 246 )
247 247 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
248 248 at = -1
249 249 atbranch = b'default'
250 250 nodeids = []
251 251 id = 0
252 252 progress.update(id)
253 253 for type, data in dagparser.parsedag(text):
254 254 if type == b'n':
255 255 ui.note((b'node %s\n' % pycompat.bytestr(data)))
256 256 id, ps = data
257 257
258 258 files = []
259 259 filecontent = {}
260 260
261 261 p2 = None
262 262 if mergeable_file:
263 263 fn = b"mf"
264 264 p1 = repo[ps[0]]
265 265 if len(ps) > 1:
266 266 p2 = repo[ps[1]]
267 267 pa = p1.ancestor(p2)
268 268 base, local, other = [
269 269 x[fn].data() for x in (pa, p1, p2)
270 270 ]
271 271 m3 = simplemerge.Merge3Text(base, local, other)
272 272 ml = [l.strip() for l in m3.merge_lines()]
273 273 ml.append(b"")
274 274 elif at > 0:
275 275 ml = p1[fn].data().split(b"\n")
276 276 else:
277 277 ml = initialmergedlines
278 278 ml[id * linesperrev] += b" r%i" % id
279 279 mergedtext = b"\n".join(ml)
280 280 files.append(fn)
281 281 filecontent[fn] = mergedtext
282 282
283 283 if overwritten_file:
284 284 fn = b"of"
285 285 files.append(fn)
286 286 filecontent[fn] = b"r%i\n" % id
287 287
288 288 if new_file:
289 289 fn = b"nf%i" % id
290 290 files.append(fn)
291 291 filecontent[fn] = b"r%i\n" % id
292 292 if len(ps) > 1:
293 293 if not p2:
294 294 p2 = repo[ps[1]]
295 295 for fn in p2:
296 296 if fn.startswith(b"nf"):
297 297 files.append(fn)
298 298 filecontent[fn] = p2[fn].data()
299 299
300 300 def fctxfn(repo, cx, path):
301 301 if path in filecontent:
302 302 return context.memfilectx(
303 303 repo, cx, path, filecontent[path]
304 304 )
305 305 return None
306 306
307 307 if len(ps) == 0 or ps[0] < 0:
308 308 pars = [None, None]
309 309 elif len(ps) == 1:
310 310 pars = [nodeids[ps[0]], None]
311 311 else:
312 312 pars = [nodeids[p] for p in ps]
313 313 cx = context.memctx(
314 314 repo,
315 315 pars,
316 316 b"r%i" % id,
317 317 files,
318 318 fctxfn,
319 319 date=(id, 0),
320 320 user=b"debugbuilddag",
321 321 extra={b'branch': atbranch},
322 322 )
323 323 nodeid = repo.commitctx(cx)
324 324 nodeids.append(nodeid)
325 325 at = id
326 326 elif type == b'l':
327 327 id, name = data
328 328 ui.note((b'tag %s\n' % name))
329 329 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
330 330 elif type == b'a':
331 331 ui.note((b'branch %s\n' % data))
332 332 atbranch = data
333 333 progress.update(id)
334 334
335 335 if tags:
336 336 repo.vfs.write(b"localtags", b"".join(tags))
337 337
338 338
339 339 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
340 340 indent_string = b' ' * indent
341 341 if all:
342 342 ui.writenoi18n(
343 343 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
344 344 % indent_string
345 345 )
346 346
347 347 def showchunks(named):
348 348 ui.write(b"\n%s%s\n" % (indent_string, named))
349 349 for deltadata in gen.deltaiter():
350 350 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
351 351 ui.write(
352 352 b"%s%s %s %s %s %s %d\n"
353 353 % (
354 354 indent_string,
355 355 hex(node),
356 356 hex(p1),
357 357 hex(p2),
358 358 hex(cs),
359 359 hex(deltabase),
360 360 len(delta),
361 361 )
362 362 )
363 363
364 364 gen.changelogheader()
365 365 showchunks(b"changelog")
366 366 gen.manifestheader()
367 367 showchunks(b"manifest")
368 368 for chunkdata in iter(gen.filelogheader, {}):
369 369 fname = chunkdata[b'filename']
370 370 showchunks(fname)
371 371 else:
372 372 if isinstance(gen, bundle2.unbundle20):
373 373 raise error.Abort(_(b'use debugbundle2 for this file'))
374 374 gen.changelogheader()
375 375 for deltadata in gen.deltaiter():
376 376 node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
377 377 ui.write(b"%s%s\n" % (indent_string, hex(node)))
378 378
379 379
380 380 def _debugobsmarkers(ui, part, indent=0, **opts):
381 381 """display version and markers contained in 'data'"""
382 382 opts = pycompat.byteskwargs(opts)
383 383 data = part.read()
384 384 indent_string = b' ' * indent
385 385 try:
386 386 version, markers = obsolete._readmarkers(data)
387 387 except error.UnknownVersion as exc:
388 388 msg = b"%sunsupported version: %s (%d bytes)\n"
389 389 msg %= indent_string, exc.version, len(data)
390 390 ui.write(msg)
391 391 else:
392 392 msg = b"%sversion: %d (%d bytes)\n"
393 393 msg %= indent_string, version, len(data)
394 394 ui.write(msg)
395 395 fm = ui.formatter(b'debugobsolete', opts)
396 396 for rawmarker in sorted(markers):
397 397 m = obsutil.marker(None, rawmarker)
398 398 fm.startitem()
399 399 fm.plain(indent_string)
400 400 cmdutil.showmarker(fm, m)
401 401 fm.end()
402 402
403 403
404 404 def _debugphaseheads(ui, data, indent=0):
405 405 """display version and markers contained in 'data'"""
406 406 indent_string = b' ' * indent
407 407 headsbyphase = phases.binarydecode(data)
408 408 for phase in phases.allphases:
409 409 for head in headsbyphase[phase]:
410 410 ui.write(indent_string)
411 411 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
412 412
413 413
414 414 def _quasirepr(thing):
415 415 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
416 416 return b'{%s}' % (
417 417 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
418 418 )
419 419 return pycompat.bytestr(repr(thing))
420 420
421 421
422 422 def _debugbundle2(ui, gen, all=None, **opts):
423 423 """lists the contents of a bundle2"""
424 424 if not isinstance(gen, bundle2.unbundle20):
425 425 raise error.Abort(_(b'not a bundle2 file'))
426 426 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
427 427 parttypes = opts.get('part_type', [])
428 428 for part in gen.iterparts():
429 429 if parttypes and part.type not in parttypes:
430 430 continue
431 431 msg = b'%s -- %s (mandatory: %r)\n'
432 432 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
433 433 if part.type == b'changegroup':
434 434 version = part.params.get(b'version', b'01')
435 435 cg = changegroup.getunbundler(version, part, b'UN')
436 436 if not ui.quiet:
437 437 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
438 438 if part.type == b'obsmarkers':
439 439 if not ui.quiet:
440 440 _debugobsmarkers(ui, part, indent=4, **opts)
441 441 if part.type == b'phase-heads':
442 442 if not ui.quiet:
443 443 _debugphaseheads(ui, part, indent=4)
444 444
445 445
446 446 @command(
447 447 b'debugbundle',
448 448 [
449 449 (b'a', b'all', None, _(b'show all details')),
450 450 (b'', b'part-type', [], _(b'show only the named part type')),
451 451 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
452 452 ],
453 453 _(b'FILE'),
454 454 norepo=True,
455 455 )
456 456 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
457 457 """lists the contents of a bundle"""
458 458 with hg.openpath(ui, bundlepath) as f:
459 459 if spec:
460 460 spec = exchange.getbundlespec(ui, f)
461 461 ui.write(b'%s\n' % spec)
462 462 return
463 463
464 464 gen = exchange.readbundle(ui, f, bundlepath)
465 465 if isinstance(gen, bundle2.unbundle20):
466 466 return _debugbundle2(ui, gen, all=all, **opts)
467 467 _debugchangegroup(ui, gen, all=all, **opts)
468 468
469 469
470 470 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
471 471 def debugcapabilities(ui, path, **opts):
472 472 """lists the capabilities of a remote peer"""
473 473 opts = pycompat.byteskwargs(opts)
474 474 peer = hg.peer(ui, opts, path)
475 475 try:
476 476 caps = peer.capabilities()
477 477 ui.writenoi18n(b'Main capabilities:\n')
478 478 for c in sorted(caps):
479 479 ui.write(b' %s\n' % c)
480 480 b2caps = bundle2.bundle2caps(peer)
481 481 if b2caps:
482 482 ui.writenoi18n(b'Bundle2 capabilities:\n')
483 483 for key, values in sorted(pycompat.iteritems(b2caps)):
484 484 ui.write(b' %s\n' % key)
485 485 for v in values:
486 486 ui.write(b' %s\n' % v)
487 487 finally:
488 488 peer.close()
489 489
490 490
491 491 @command(
492 492 b'debugchangedfiles',
493 493 [
494 494 (
495 495 b'',
496 496 b'compute',
497 497 False,
498 498 b"compute information instead of reading it from storage",
499 499 ),
500 500 ],
501 501 b'REV',
502 502 )
503 503 def debugchangedfiles(ui, repo, rev, **opts):
504 504 """list the stored files changes for a revision"""
505 505 ctx = scmutil.revsingle(repo, rev, None)
506 506 files = None
507 507
508 508 if opts['compute']:
509 509 files = metadata.compute_all_files_changes(ctx)
510 510 else:
511 511 sd = repo.changelog.sidedata(ctx.rev())
512 512 files_block = sd.get(sidedata.SD_FILES)
513 513 if files_block is not None:
514 514 files = metadata.decode_files_sidedata(sd)
515 515 if files is not None:
516 516 for f in sorted(files.touched):
517 517 if f in files.added:
518 518 action = b"added"
519 519 elif f in files.removed:
520 520 action = b"removed"
521 521 elif f in files.merged:
522 522 action = b"merged"
523 523 elif f in files.salvaged:
524 524 action = b"salvaged"
525 525 else:
526 526 action = b"touched"
527 527
528 528 copy_parent = b""
529 529 copy_source = b""
530 530 if f in files.copied_from_p1:
531 531 copy_parent = b"p1"
532 532 copy_source = files.copied_from_p1[f]
533 533 elif f in files.copied_from_p2:
534 534 copy_parent = b"p2"
535 535 copy_source = files.copied_from_p2[f]
536 536
537 537 data = (action, copy_parent, f, copy_source)
538 538 template = b"%-8s %2s: %s, %s;\n"
539 539 ui.write(template % data)
540 540
541 541
542 542 @command(b'debugcheckstate', [], b'')
543 543 def debugcheckstate(ui, repo):
544 544 """validate the correctness of the current dirstate"""
545 545 parent1, parent2 = repo.dirstate.parents()
546 546 m1 = repo[parent1].manifest()
547 547 m2 = repo[parent2].manifest()
548 548 errors = 0
549 549 for f in repo.dirstate:
550 550 state = repo.dirstate[f]
551 551 if state in b"nr" and f not in m1:
552 552 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
553 553 errors += 1
554 554 if state in b"a" and f in m1:
555 555 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
556 556 errors += 1
557 557 if state in b"m" and f not in m1 and f not in m2:
558 558 ui.warn(
559 559 _(b"%s in state %s, but not in either manifest\n") % (f, state)
560 560 )
561 561 errors += 1
562 562 for f in m1:
563 563 state = repo.dirstate[f]
564 564 if state not in b"nrm":
565 565 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
566 566 errors += 1
567 567 if errors:
568 568 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
569 569 raise error.Abort(errstr)
570 570
571 571
572 572 @command(
573 573 b'debugcolor',
574 574 [(b'', b'style', None, _(b'show all configured styles'))],
575 575 b'hg debugcolor',
576 576 )
577 577 def debugcolor(ui, repo, **opts):
578 578 """show available color, effects or style"""
579 579 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
580 580 if opts.get('style'):
581 581 return _debugdisplaystyle(ui)
582 582 else:
583 583 return _debugdisplaycolor(ui)
584 584
585 585
586 586 def _debugdisplaycolor(ui):
587 587 ui = ui.copy()
588 588 ui._styles.clear()
589 589 for effect in color._activeeffects(ui).keys():
590 590 ui._styles[effect] = effect
591 591 if ui._terminfoparams:
592 592 for k, v in ui.configitems(b'color'):
593 593 if k.startswith(b'color.'):
594 594 ui._styles[k] = k[6:]
595 595 elif k.startswith(b'terminfo.'):
596 596 ui._styles[k] = k[9:]
597 597 ui.write(_(b'available colors:\n'))
598 598 # sort labels containing '_' after the others to group the '_background' entries.
599 599 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
600 600 for colorname, label in items:
601 601 ui.write(b'%s\n' % colorname, label=label)
602 602
603 603
604 604 def _debugdisplaystyle(ui):
605 605 ui.write(_(b'available style:\n'))
606 606 if not ui._styles:
607 607 return
608 608 width = max(len(s) for s in ui._styles)
609 609 for label, effects in sorted(ui._styles.items()):
610 610 ui.write(b'%s' % label, label=label)
611 611 if effects:
612 612 # 50
613 613 ui.write(b': ')
614 614 ui.write(b' ' * (max(0, width - len(label))))
615 615 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
616 616 ui.write(b'\n')
617 617
618 618
619 619 @command(b'debugcreatestreamclonebundle', [], b'FILE')
620 620 def debugcreatestreamclonebundle(ui, repo, fname):
621 621 """create a stream clone bundle file
622 622
623 623 Stream bundles are special bundles that are essentially archives of
624 624 revlog files. They are commonly used for cloning very quickly.
625 625 """
626 626 # TODO we may want to turn this into an abort when this functionality
627 627 # is moved into `hg bundle`.
628 628 if phases.hassecret(repo):
629 629 ui.warn(
630 630 _(
631 631 b'(warning: stream clone bundle will contain secret '
632 632 b'revisions)\n'
633 633 )
634 634 )
635 635
636 636 requirements, gen = streamclone.generatebundlev1(repo)
637 637 changegroup.writechunks(ui, gen, fname)
638 638
639 639 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
640 640
641 641
642 642 @command(
643 643 b'debugdag',
644 644 [
645 645 (b't', b'tags', None, _(b'use tags as labels')),
646 646 (b'b', b'branches', None, _(b'annotate with branch names')),
647 647 (b'', b'dots', None, _(b'use dots for runs')),
648 648 (b's', b'spaces', None, _(b'separate elements by spaces')),
649 649 ],
650 650 _(b'[OPTION]... [FILE [REV]...]'),
651 651 optionalrepo=True,
652 652 )
653 653 def debugdag(ui, repo, file_=None, *revs, **opts):
654 654 """format the changelog or an index DAG as a concise textual description
655 655
656 656 If you pass a revlog index, the revlog's DAG is emitted. If you list
657 657 revision numbers, they get labeled in the output as rN.
658 658
659 659 Otherwise, the changelog DAG of the current repo is emitted.
660 660 """
661 661 spaces = opts.get('spaces')
662 662 dots = opts.get('dots')
663 663 if file_:
664 664 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
665 665 revs = {int(r) for r in revs}
666 666
667 667 def events():
668 668 for r in rlog:
669 669 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
670 670 if r in revs:
671 671 yield b'l', (r, b"r%i" % r)
672 672
673 673 elif repo:
674 674 cl = repo.changelog
675 675 tags = opts.get('tags')
676 676 branches = opts.get('branches')
677 677 if tags:
678 678 labels = {}
679 679 for l, n in repo.tags().items():
680 680 labels.setdefault(cl.rev(n), []).append(l)
681 681
682 682 def events():
683 683 b = b"default"
684 684 for r in cl:
685 685 if branches:
686 686 newb = cl.read(cl.node(r))[5][b'branch']
687 687 if newb != b:
688 688 yield b'a', newb
689 689 b = newb
690 690 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
691 691 if tags:
692 692 ls = labels.get(r)
693 693 if ls:
694 694 for l in ls:
695 695 yield b'l', (r, l)
696 696
697 697 else:
698 698 raise error.Abort(_(b'need repo for changelog dag'))
699 699
700 700 for line in dagparser.dagtextlines(
701 701 events(),
702 702 addspaces=spaces,
703 703 wraplabels=True,
704 704 wrapannotations=True,
705 705 wrapnonlinear=dots,
706 706 usedots=dots,
707 707 maxlinewidth=70,
708 708 ):
709 709 ui.write(line)
710 710 ui.write(b"\n")
711 711
712 712
713 713 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
714 714 def debugdata(ui, repo, file_, rev=None, **opts):
715 715 """dump the contents of a data file revision"""
716 716 opts = pycompat.byteskwargs(opts)
717 717 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
718 718 if rev is not None:
719 719 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
720 720 file_, rev = None, file_
721 721 elif rev is None:
722 722 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
723 723 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
724 724 try:
725 725 ui.write(r.rawdata(r.lookup(rev)))
726 726 except KeyError:
727 727 raise error.Abort(_(b'invalid revision identifier %s') % rev)
728 728
729 729
730 730 @command(
731 731 b'debugdate',
732 732 [(b'e', b'extended', None, _(b'try extended date formats'))],
733 733 _(b'[-e] DATE [RANGE]'),
734 734 norepo=True,
735 735 optionalrepo=True,
736 736 )
737 737 def debugdate(ui, date, range=None, **opts):
738 738 """parse and display a date"""
739 739 if opts["extended"]:
740 740 d = dateutil.parsedate(date, dateutil.extendeddateformats)
741 741 else:
742 742 d = dateutil.parsedate(date)
743 743 ui.writenoi18n(b"internal: %d %d\n" % d)
744 744 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
745 745 if range:
746 746 m = dateutil.matchdate(range)
747 747 ui.writenoi18n(b"match: %s\n" % m(d[0]))
748 748
749 749
750 750 @command(
751 751 b'debugdeltachain',
752 752 cmdutil.debugrevlogopts + cmdutil.formatteropts,
753 753 _(b'-c|-m|FILE'),
754 754 optionalrepo=True,
755 755 )
756 756 def debugdeltachain(ui, repo, file_=None, **opts):
757 757 """dump information about delta chains in a revlog
758 758
759 759 Output can be templatized. Available template keywords are:
760 760
761 761 :``rev``: revision number
762 762 :``chainid``: delta chain identifier (numbered by unique base)
763 763 :``chainlen``: delta chain length to this revision
764 764 :``prevrev``: previous revision in delta chain
765 765 :``deltatype``: role of delta / how it was computed
766 766 :``compsize``: compressed size of revision
767 767 :``uncompsize``: uncompressed size of revision
768 768 :``chainsize``: total size of compressed revisions in chain
769 769 :``chainratio``: total chain size divided by uncompressed revision size
770 770 (new delta chains typically start at ratio 2.00)
771 771 :``lindist``: linear distance from base revision in delta chain to end
772 772 of this revision
773 773 :``extradist``: total size of revisions not part of this delta chain from
774 774 base of delta chain to end of this revision; a measurement
775 775 of how much extra data we need to read/seek across to read
776 776 the delta chain for this revision
777 777 :``extraratio``: extradist divided by chainsize; another representation of
778 778 how much unrelated data is needed to load this delta chain
779 779
780 780 If the repository is configured to use sparse reads, additional keywords
781 781 are available:
782 782
783 783 :``readsize``: total size of data read from the disk for a revision
784 784 (sum of the sizes of all the blocks)
785 785 :``largestblock``: size of the largest block of data read from the disk
786 786 :``readdensity``: density of useful bytes in the data read from the disk
787 787 :``srchunks``: in how many data hunks the whole revision would be read
788 788
789 789 The sparse read can be enabled with experimental.sparse-read = True
790 790 """
791 791 opts = pycompat.byteskwargs(opts)
792 792 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
793 793 index = r.index
794 794 start = r.start
795 795 length = r.length
796 796 generaldelta = r.version & revlog.FLAG_GENERALDELTA
797 797 withsparseread = getattr(r, '_withsparseread', False)
798 798
799 799 def revinfo(rev):
800 800 e = index[rev]
801 801 compsize = e[1]
802 802 uncompsize = e[2]
803 803 chainsize = 0
804 804
805 805 if generaldelta:
806 806 if e[3] == e[5]:
807 807 deltatype = b'p1'
808 808 elif e[3] == e[6]:
809 809 deltatype = b'p2'
810 810 elif e[3] == rev - 1:
811 811 deltatype = b'prev'
812 812 elif e[3] == rev:
813 813 deltatype = b'base'
814 814 else:
815 815 deltatype = b'other'
816 816 else:
817 817 if e[3] == rev:
818 818 deltatype = b'base'
819 819 else:
820 820 deltatype = b'prev'
821 821
822 822 chain = r._deltachain(rev)[0]
823 823 for iterrev in chain:
824 824 e = index[iterrev]
825 825 chainsize += e[1]
826 826
827 827 return compsize, uncompsize, deltatype, chain, chainsize
828 828
829 829 fm = ui.formatter(b'debugdeltachain', opts)
830 830
831 831 fm.plain(
832 832 b' rev chain# chainlen prev delta '
833 833 b'size rawsize chainsize ratio lindist extradist '
834 834 b'extraratio'
835 835 )
836 836 if withsparseread:
837 837 fm.plain(b' readsize largestblk rddensity srchunks')
838 838 fm.plain(b'\n')
839 839
840 840 chainbases = {}
841 841 for rev in r:
842 842 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
843 843 chainbase = chain[0]
844 844 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
845 845 basestart = start(chainbase)
846 846 revstart = start(rev)
847 847 lineardist = revstart + comp - basestart
848 848 extradist = lineardist - chainsize
849 849 try:
850 850 prevrev = chain[-2]
851 851 except IndexError:
852 852 prevrev = -1
853 853
854 854 if uncomp != 0:
855 855 chainratio = float(chainsize) / float(uncomp)
856 856 else:
857 857 chainratio = chainsize
858 858
859 859 if chainsize != 0:
860 860 extraratio = float(extradist) / float(chainsize)
861 861 else:
862 862 extraratio = extradist
863 863
864 864 fm.startitem()
865 865 fm.write(
866 866 b'rev chainid chainlen prevrev deltatype compsize '
867 867 b'uncompsize chainsize chainratio lindist extradist '
868 868 b'extraratio',
869 869 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
870 870 rev,
871 871 chainid,
872 872 len(chain),
873 873 prevrev,
874 874 deltatype,
875 875 comp,
876 876 uncomp,
877 877 chainsize,
878 878 chainratio,
879 879 lineardist,
880 880 extradist,
881 881 extraratio,
882 882 rev=rev,
883 883 chainid=chainid,
884 884 chainlen=len(chain),
885 885 prevrev=prevrev,
886 886 deltatype=deltatype,
887 887 compsize=comp,
888 888 uncompsize=uncomp,
889 889 chainsize=chainsize,
890 890 chainratio=chainratio,
891 891 lindist=lineardist,
892 892 extradist=extradist,
893 893 extraratio=extraratio,
894 894 )
895 895 if withsparseread:
896 896 readsize = 0
897 897 largestblock = 0
898 898 srchunks = 0
899 899
900 900 for revschunk in deltautil.slicechunk(r, chain):
901 901 srchunks += 1
902 902 blkend = start(revschunk[-1]) + length(revschunk[-1])
903 903 blksize = blkend - start(revschunk[0])
904 904
905 905 readsize += blksize
906 906 if largestblock < blksize:
907 907 largestblock = blksize
908 908
909 909 if readsize:
910 910 readdensity = float(chainsize) / float(readsize)
911 911 else:
912 912 readdensity = 1
913 913
914 914 fm.write(
915 915 b'readsize largestblock readdensity srchunks',
916 916 b' %10d %10d %9.5f %8d',
917 917 readsize,
918 918 largestblock,
919 919 readdensity,
920 920 srchunks,
921 921 readsize=readsize,
922 922 largestblock=largestblock,
923 923 readdensity=readdensity,
924 924 srchunks=srchunks,
925 925 )
926 926
927 927 fm.plain(b'\n')
928 928
929 929 fm.end()
930 930
931 931
932 932 @command(
933 933 b'debugdirstate|debugstate',
934 934 [
935 935 (
936 936 b'',
937 937 b'nodates',
938 938 None,
939 939 _(b'do not display the saved mtime (DEPRECATED)'),
940 940 ),
941 941 (b'', b'dates', True, _(b'display the saved mtime')),
942 942 (b'', b'datesort', None, _(b'sort by saved mtime')),
943 943 ],
944 944 _(b'[OPTION]...'),
945 945 )
946 946 def debugstate(ui, repo, **opts):
947 947 """show the contents of the current dirstate"""
948 948
949 949 nodates = not opts['dates']
950 950 if opts.get('nodates') is not None:
951 951 nodates = True
952 952 datesort = opts.get('datesort')
953 953
954 954 if datesort:
955 955 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
956 956 else:
957 957 keyfunc = None # sort by filename
958 958 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
959 959 if ent[3] == -1:
960 960 timestr = b'unset '
961 961 elif nodates:
962 962 timestr = b'set '
963 963 else:
964 964 timestr = time.strftime(
965 965 "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
966 966 )
967 967 timestr = encoding.strtolocal(timestr)
968 968 if ent[1] & 0o20000:
969 969 mode = b'lnk'
970 970 else:
971 971 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
972 972 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
973 973 for f in repo.dirstate.copies():
974 974 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
975 975
976 976
977 977 @command(
978 978 b'debugdiscovery',
979 979 [
980 980 (b'', b'old', None, _(b'use old-style discovery')),
981 981 (
982 982 b'',
983 983 b'nonheads',
984 984 None,
985 985 _(b'use old-style discovery with non-heads included'),
986 986 ),
987 987 (b'', b'rev', [], b'restrict discovery to this set of revs'),
988 988 (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
989 989 (
990 990 b'',
991 991 b'local-as-revs',
992 "",
993 'treat local as having these revisions only',
992 b"",
993 b'treat local as having these revisions only',
994 994 ),
995 995 (
996 996 b'',
997 997 b'remote-as-revs',
998 "",
999 'use local as remote, with only these revisions',
998 b"",
999 b'use local as remote, with only these revisions',
1000 1000 ),
1001 1001 ]
1002 1002 + cmdutil.remoteopts
1003 1003 + cmdutil.formatteropts,
1004 1004 _(b'[--rev REV] [OTHER]'),
1005 1005 )
1006 1006 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
1007 1007 """runs the changeset discovery protocol in isolation
1008 1008
1009 1009 The local peer can be "replaced" by a subset of the local repository by
1010 1010 using the `--local-as-revs` flag. In the same way, the usual `remote` peer can
1011 1011 be "replaced" by a subset of the local repository using the
1012 1012 `--remote-as-revs` flag. This is useful to efficiently debug pathological
1013 1013 discovery situations.
1014 1014 """
1015 1015 opts = pycompat.byteskwargs(opts)
1016 1016 unfi = repo.unfiltered()
1017 1017
1018 1018 # setup potential extra filtering
1019 1019 local_revs = opts[b"local_as_revs"]
1020 1020 remote_revs = opts[b"remote_as_revs"]
1021 1021
1022 1022 # make sure tests are repeatable
1023 1023 random.seed(int(opts[b'seed']))
1024 1024
1025 1025 if not remote_revs:
1026 1026
1027 1027 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
1028 1028 remote = hg.peer(repo, opts, remoteurl)
1029 1029 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
1030 1030 else:
1031 1031 branches = (None, [])
1032 1032 remote_filtered_revs = scmutil.revrange(
1033 1033 unfi, [b"not (::(%s))" % remote_revs]
1034 1034 )
1035 1035 remote_filtered_revs = frozenset(remote_filtered_revs)
1036 1036
1037 1037 def remote_func(x):
1038 1038 return remote_filtered_revs
1039 1039
1040 1040 repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func
1041 1041
1042 1042 remote = repo.peer()
1043 1043 remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')
1044 1044
1045 1045 if local_revs:
1046 1046 local_filtered_revs = scmutil.revrange(
1047 1047 unfi, [b"not (::(%s))" % local_revs]
1048 1048 )
1049 1049 local_filtered_revs = frozenset(local_filtered_revs)
1050 1050
1051 1051 def local_func(x):
1052 1052 return local_filtered_revs
1053 1053
1054 1054 repoview.filtertable[b'debug-discovery-local-filter'] = local_func
1055 1055 repo = repo.filtered(b'debug-discovery-local-filter')
1056 1056
1057 1057 data = {}
1058 1058 if opts.get(b'old'):
1059 1059
1060 1060 def doit(pushedrevs, remoteheads, remote=remote):
1061 1061 if not util.safehasattr(remote, b'branches'):
1062 1062 # enable in-client legacy support
1063 1063 remote = localrepo.locallegacypeer(remote.local())
1064 1064 common, _in, hds = treediscovery.findcommonincoming(
1065 1065 repo, remote, force=True, audit=data
1066 1066 )
1067 1067 common = set(common)
1068 1068 if not opts.get(b'nonheads'):
1069 1069 ui.writenoi18n(
1070 1070 b"unpruned common: %s\n"
1071 1071 % b" ".join(sorted(short(n) for n in common))
1072 1072 )
1073 1073
1074 1074 clnode = repo.changelog.node
1075 1075 common = repo.revs(b'heads(::%ln)', common)
1076 1076 common = {clnode(r) for r in common}
1077 1077 return common, hds
1078 1078
1079 1079 else:
1080 1080
1081 1081 def doit(pushedrevs, remoteheads, remote=remote):
1082 1082 nodes = None
1083 1083 if pushedrevs:
1084 1084 revs = scmutil.revrange(repo, pushedrevs)
1085 1085 nodes = [repo[r].node() for r in revs]
1086 1086 common, any, hds = setdiscovery.findcommonheads(
1087 1087 ui, repo, remote, ancestorsof=nodes, audit=data
1088 1088 )
1089 1089 return common, hds
1090 1090
1091 1091 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
1092 1092 localrevs = opts[b'rev']
1093 1093
1094 1094 fm = ui.formatter(b'debugdiscovery', opts)
1095 1095 if fm.strict_format:
1096 1096
1097 1097 @contextlib.contextmanager
1098 1098 def may_capture_output():
1099 1099 ui.pushbuffer()
1100 1100 yield
1101 1101 data[b'output'] = ui.popbuffer()
1102 1102
1103 1103 else:
1104 1104 may_capture_output = util.nullcontextmanager
1105 1105 with may_capture_output():
1106 1106 with util.timedcm('debug-discovery') as t:
1107 1107 common, hds = doit(localrevs, remoterevs)
1108 1108
1109 1109 # compute all statistics
1110 1110 heads_common = set(common)
1111 1111 heads_remote = set(hds)
1112 1112 heads_local = set(repo.heads())
1113 1113 # note: there cannot be a local or remote head that is in common and not
1114 1114 # itself a head of common.
1115 1115 heads_common_local = heads_common & heads_local
1116 1116 heads_common_remote = heads_common & heads_remote
1117 1117 heads_common_both = heads_common & heads_remote & heads_local
1118 1118
1119 1119 all = repo.revs(b'all()')
1120 1120 common = repo.revs(b'::%ln', common)
1121 1121 roots_common = repo.revs(b'roots(::%ld)', common)
1122 1122 missing = repo.revs(b'not ::%ld', common)
1123 1123 heads_missing = repo.revs(b'heads(%ld)', missing)
1124 1124 roots_missing = repo.revs(b'roots(%ld)', missing)
1125 1125 assert len(common) + len(missing) == len(all)
1126 1126
1127 1127 initial_undecided = repo.revs(
1128 1128 b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
1129 1129 )
1130 1130 heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
1131 1131 roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
1132 1132 common_initial_undecided = initial_undecided & common
1133 1133 missing_initial_undecided = initial_undecided & missing
1134 1134
1135 1135 data[b'elapsed'] = t.elapsed
1136 1136 data[b'nb-common-heads'] = len(heads_common)
1137 1137 data[b'nb-common-heads-local'] = len(heads_common_local)
1138 1138 data[b'nb-common-heads-remote'] = len(heads_common_remote)
1139 1139 data[b'nb-common-heads-both'] = len(heads_common_both)
1140 1140 data[b'nb-common-roots'] = len(roots_common)
1141 1141 data[b'nb-head-local'] = len(heads_local)
1142 1142 data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
1143 1143 data[b'nb-head-remote'] = len(heads_remote)
1144 1144 data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
1145 1145 heads_common_remote
1146 1146 )
1147 1147 data[b'nb-revs'] = len(all)
1148 1148 data[b'nb-revs-common'] = len(common)
1149 1149 data[b'nb-revs-missing'] = len(missing)
1150 1150 data[b'nb-missing-heads'] = len(heads_missing)
1151 1151 data[b'nb-missing-roots'] = len(roots_missing)
1152 1152 data[b'nb-ini_und'] = len(initial_undecided)
1153 1153 data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
1154 1154 data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
1155 1155 data[b'nb-ini_und-common'] = len(common_initial_undecided)
1156 1156 data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
1157 1157
1158 1158 fm.startitem()
1159 1159 fm.data(**pycompat.strkwargs(data))
1160 1160 # display discovery summary
1161 1161 fm.plain(b"elapsed time: %(elapsed)f seconds\n" % data)
1162 1162 fm.plain(b"round-trips: %(total-roundtrips)9d\n" % data)
1163 1163 fm.plain(b"heads summary:\n")
1164 1164 fm.plain(b" total common heads: %(nb-common-heads)9d\n" % data)
1165 1165 fm.plain(b" also local heads: %(nb-common-heads-local)9d\n" % data)
1166 1166 fm.plain(b" also remote heads: %(nb-common-heads-remote)9d\n" % data)
1167 1167 fm.plain(b" both: %(nb-common-heads-both)9d\n" % data)
1168 1168 fm.plain(b" local heads: %(nb-head-local)9d\n" % data)
1169 1169 fm.plain(b" common: %(nb-common-heads-local)9d\n" % data)
1170 1170 fm.plain(b" missing: %(nb-head-local-missing)9d\n" % data)
1171 1171 fm.plain(b" remote heads: %(nb-head-remote)9d\n" % data)
1172 1172 fm.plain(b" common: %(nb-common-heads-remote)9d\n" % data)
1173 1173 fm.plain(b" unknown: %(nb-head-remote-unknown)9d\n" % data)
1174 1174 fm.plain(b"local changesets: %(nb-revs)9d\n" % data)
1175 1175 fm.plain(b" common: %(nb-revs-common)9d\n" % data)
1176 1176 fm.plain(b" heads: %(nb-common-heads)9d\n" % data)
1177 1177 fm.plain(b" roots: %(nb-common-roots)9d\n" % data)
1178 1178 fm.plain(b" missing: %(nb-revs-missing)9d\n" % data)
1179 1179 fm.plain(b" heads: %(nb-missing-heads)9d\n" % data)
1180 1180 fm.plain(b" roots: %(nb-missing-roots)9d\n" % data)
1181 1181 fm.plain(b" first undecided set: %(nb-ini_und)9d\n" % data)
1182 1182 fm.plain(b" heads: %(nb-ini_und-heads)9d\n" % data)
1183 1183 fm.plain(b" roots: %(nb-ini_und-roots)9d\n" % data)
1184 1184 fm.plain(b" common: %(nb-ini_und-common)9d\n" % data)
1185 1185 fm.plain(b" missing: %(nb-ini_und-missing)9d\n" % data)
1186 1186
1187 1187 if ui.verbose:
1188 1188 fm.plain(
1189 1189 b"common heads: %s\n"
1190 1190 % b" ".join(sorted(short(n) for n in heads_common))
1191 1191 )
1192 1192 fm.end()
1193 1193
1194 1194
1195 1195 _chunksize = 4 << 10
1196 1196
1197 1197
1198 1198 @command(
1199 1199 b'debugdownload',
1200 1200 [
1201 1201 (b'o', b'output', b'', _(b'path')),
1202 1202 ],
1203 1203 optionalrepo=True,
1204 1204 )
1205 1205 def debugdownload(ui, repo, url, output=None, **opts):
1206 1206 """download a resource using Mercurial logic and config"""
1207 1207 fh = urlmod.open(ui, url, output)
1208 1208
1209 1209 dest = ui
1210 1210 if output:
1211 1211 dest = open(output, b"wb", _chunksize)
1212 1212 try:
1213 1213 data = fh.read(_chunksize)
1214 1214 while data:
1215 1215 dest.write(data)
1216 1216 data = fh.read(_chunksize)
1217 1217 finally:
1218 1218 if output:
1219 1219 dest.close()
1220 1220
1221 1221
1222 1222 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1223 1223 def debugextensions(ui, repo, **opts):
1224 1224 '''show information about active extensions'''
1225 1225 opts = pycompat.byteskwargs(opts)
1226 1226 exts = extensions.extensions(ui)
1227 1227 hgver = util.version()
1228 1228 fm = ui.formatter(b'debugextensions', opts)
1229 1229 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1230 1230 isinternal = extensions.ismoduleinternal(extmod)
1231 1231 extsource = None
1232 1232
1233 1233 if util.safehasattr(extmod, '__file__'):
1234 1234 extsource = pycompat.fsencode(extmod.__file__)
1235 1235 elif getattr(sys, 'oxidized', False):
1236 1236 extsource = pycompat.sysexecutable
1237 1237 if isinternal:
1238 1238 exttestedwith = [] # never expose magic string to users
1239 1239 else:
1240 1240 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1241 1241 extbuglink = getattr(extmod, 'buglink', None)
1242 1242
1243 1243 fm.startitem()
1244 1244
1245 1245 if ui.quiet or ui.verbose:
1246 1246 fm.write(b'name', b'%s\n', extname)
1247 1247 else:
1248 1248 fm.write(b'name', b'%s', extname)
1249 1249 if isinternal or hgver in exttestedwith:
1250 1250 fm.plain(b'\n')
1251 1251 elif not exttestedwith:
1252 1252 fm.plain(_(b' (untested!)\n'))
1253 1253 else:
1254 1254 lasttestedversion = exttestedwith[-1]
1255 1255 fm.plain(b' (%s!)\n' % lasttestedversion)
1256 1256
1257 1257 fm.condwrite(
1258 1258 ui.verbose and extsource,
1259 1259 b'source',
1260 1260 _(b' location: %s\n'),
1261 1261 extsource or b"",
1262 1262 )
1263 1263
1264 1264 if ui.verbose:
1265 1265 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1266 1266 fm.data(bundled=isinternal)
1267 1267
1268 1268 fm.condwrite(
1269 1269 ui.verbose and exttestedwith,
1270 1270 b'testedwith',
1271 1271 _(b' tested with: %s\n'),
1272 1272 fm.formatlist(exttestedwith, name=b'ver'),
1273 1273 )
1274 1274
1275 1275 fm.condwrite(
1276 1276 ui.verbose and extbuglink,
1277 1277 b'buglink',
1278 1278 _(b' bug reporting: %s\n'),
1279 1279 extbuglink or b"",
1280 1280 )
1281 1281
1282 1282 fm.end()
1283 1283
1284 1284
1285 1285 @command(
1286 1286 b'debugfileset',
1287 1287 [
1288 1288 (
1289 1289 b'r',
1290 1290 b'rev',
1291 1291 b'',
1292 1292 _(b'apply the filespec on this revision'),
1293 1293 _(b'REV'),
1294 1294 ),
1295 1295 (
1296 1296 b'',
1297 1297 b'all-files',
1298 1298 False,
1299 1299 _(b'test files from all revisions and working directory'),
1300 1300 ),
1301 1301 (
1302 1302 b's',
1303 1303 b'show-matcher',
1304 1304 None,
1305 1305 _(b'print internal representation of matcher'),
1306 1306 ),
1307 1307 (
1308 1308 b'p',
1309 1309 b'show-stage',
1310 1310 [],
1311 1311 _(b'print parsed tree at the given stage'),
1312 1312 _(b'NAME'),
1313 1313 ),
1314 1314 ],
1315 1315 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1316 1316 )
1317 1317 def debugfileset(ui, repo, expr, **opts):
1318 1318 '''parse and apply a fileset specification'''
1319 1319 from . import fileset
1320 1320
1321 1321 fileset.symbols # force import of fileset so we have predicates to optimize
1322 1322 opts = pycompat.byteskwargs(opts)
1323 1323 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1324 1324
1325 1325 stages = [
1326 1326 (b'parsed', pycompat.identity),
1327 1327 (b'analyzed', filesetlang.analyze),
1328 1328 (b'optimized', filesetlang.optimize),
1329 1329 ]
1330 1330 stagenames = {n for n, f in stages}
1331 1331
1332 1332 showalways = set()
1333 1333 if ui.verbose and not opts[b'show_stage']:
1334 1334 # show parsed tree by --verbose (deprecated)
1335 1335 showalways.add(b'parsed')
1336 1336 if opts[b'show_stage'] == [b'all']:
1337 1337 showalways.update(stagenames)
1338 1338 else:
1339 1339 for n in opts[b'show_stage']:
1340 1340 if n not in stagenames:
1341 1341 raise error.Abort(_(b'invalid stage name: %s') % n)
1342 1342 showalways.update(opts[b'show_stage'])
1343 1343
1344 1344 tree = filesetlang.parse(expr)
1345 1345 for n, f in stages:
1346 1346 tree = f(tree)
1347 1347 if n in showalways:
1348 1348 if opts[b'show_stage'] or n != b'parsed':
1349 1349 ui.write(b"* %s:\n" % n)
1350 1350 ui.write(filesetlang.prettyformat(tree), b"\n")
1351 1351
1352 1352 files = set()
1353 1353 if opts[b'all_files']:
1354 1354 for r in repo:
1355 1355 c = repo[r]
1356 1356 files.update(c.files())
1357 1357 files.update(c.substate)
1358 1358 if opts[b'all_files'] or ctx.rev() is None:
1359 1359 wctx = repo[None]
1360 1360 files.update(
1361 1361 repo.dirstate.walk(
1362 1362 scmutil.matchall(repo),
1363 1363 subrepos=list(wctx.substate),
1364 1364 unknown=True,
1365 1365 ignored=True,
1366 1366 )
1367 1367 )
1368 1368 files.update(wctx.substate)
1369 1369 else:
1370 1370 files.update(ctx.files())
1371 1371 files.update(ctx.substate)
1372 1372
1373 1373 m = ctx.matchfileset(repo.getcwd(), expr)
1374 1374 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1375 1375 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1376 1376 for f in sorted(files):
1377 1377 if not m(f):
1378 1378 continue
1379 1379 ui.write(b"%s\n" % f)
1380 1380
1381 1381
1382 1382 @command(b'debugformat', [] + cmdutil.formatteropts)
1383 1383 def debugformat(ui, repo, **opts):
1384 1384 """display format information about the current repository
1385 1385
1386 1386 Use --verbose to get extra information about the current config value and
1387 1387 the Mercurial default."""
1388 1388 opts = pycompat.byteskwargs(opts)
1389 1389 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1390 1390 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1391 1391
1392 1392 def makeformatname(name):
1393 1393 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1394 1394
1395 1395 fm = ui.formatter(b'debugformat', opts)
1396 1396 if fm.isplain():
1397 1397
1398 1398 def formatvalue(value):
1399 1399 if util.safehasattr(value, b'startswith'):
1400 1400 return value
1401 1401 if value:
1402 1402 return b'yes'
1403 1403 else:
1404 1404 return b'no'
1405 1405
1406 1406 else:
1407 1407 formatvalue = pycompat.identity
1408 1408
1409 1409 fm.plain(b'format-variant')
1410 1410 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1411 1411 fm.plain(b' repo')
1412 1412 if ui.verbose:
1413 1413 fm.plain(b' config default')
1414 1414 fm.plain(b'\n')
1415 1415 for fv in upgrade.allformatvariant:
1416 1416 fm.startitem()
1417 1417 repovalue = fv.fromrepo(repo)
1418 1418 configvalue = fv.fromconfig(repo)
1419 1419
1420 1420 if repovalue != configvalue:
1421 1421 namelabel = b'formatvariant.name.mismatchconfig'
1422 1422 repolabel = b'formatvariant.repo.mismatchconfig'
1423 1423 elif repovalue != fv.default:
1424 1424 namelabel = b'formatvariant.name.mismatchdefault'
1425 1425 repolabel = b'formatvariant.repo.mismatchdefault'
1426 1426 else:
1427 1427 namelabel = b'formatvariant.name.uptodate'
1428 1428 repolabel = b'formatvariant.repo.uptodate'
1429 1429
1430 1430 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1431 1431 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1432 1432 if fv.default != configvalue:
1433 1433 configlabel = b'formatvariant.config.special'
1434 1434 else:
1435 1435 configlabel = b'formatvariant.config.default'
1436 1436 fm.condwrite(
1437 1437 ui.verbose,
1438 1438 b'config',
1439 1439 b' %6s',
1440 1440 formatvalue(configvalue),
1441 1441 label=configlabel,
1442 1442 )
1443 1443 fm.condwrite(
1444 1444 ui.verbose,
1445 1445 b'default',
1446 1446 b' %7s',
1447 1447 formatvalue(fv.default),
1448 1448 label=b'formatvariant.default',
1449 1449 )
1450 1450 fm.plain(b'\n')
1451 1451 fm.end()
1452 1452
1453 1453
1454 1454 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1455 1455 def debugfsinfo(ui, path=b"."):
1456 1456 """show information detected about current filesystem"""
1457 1457 ui.writenoi18n(b'path: %s\n' % path)
1458 1458 ui.writenoi18n(
1459 1459 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1460 1460 )
1461 1461 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1462 1462 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1463 1463 ui.writenoi18n(
1464 1464 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1465 1465 )
1466 1466 ui.writenoi18n(
1467 1467 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1468 1468 )
1469 1469 casesensitive = b'(unknown)'
1470 1470 try:
1471 1471 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1472 1472 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1473 1473 except OSError:
1474 1474 pass
1475 1475 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1476 1476
1477 1477
1478 1478 @command(
1479 1479 b'debuggetbundle',
1480 1480 [
1481 1481 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1482 1482 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1483 1483 (
1484 1484 b't',
1485 1485 b'type',
1486 1486 b'bzip2',
1487 1487 _(b'bundle compression type to use'),
1488 1488 _(b'TYPE'),
1489 1489 ),
1490 1490 ],
1491 1491 _(b'REPO FILE [-H|-C ID]...'),
1492 1492 norepo=True,
1493 1493 )
1494 1494 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1495 1495 """retrieves a bundle from a repo
1496 1496
1497 1497 Every ID must be a full-length hex node id string. Saves the bundle to the
1498 1498 given file.
1499 1499 """
1500 1500 opts = pycompat.byteskwargs(opts)
1501 1501 repo = hg.peer(ui, opts, repopath)
1502 1502 if not repo.capable(b'getbundle'):
1503 1503 raise error.Abort(b"getbundle() not supported by target repository")
1504 1504 args = {}
1505 1505 if common:
1506 1506 args['common'] = [bin(s) for s in common]
1507 1507 if head:
1508 1508 args['heads'] = [bin(s) for s in head]
1509 1509 # TODO: get desired bundlecaps from command line.
1510 1510 args['bundlecaps'] = None
1511 1511 bundle = repo.getbundle(b'debug', **args)
1512 1512
1513 1513 bundletype = opts.get(b'type', b'bzip2').lower()
1514 1514 btypes = {
1515 1515 b'none': b'HG10UN',
1516 1516 b'bzip2': b'HG10BZ',
1517 1517 b'gzip': b'HG10GZ',
1518 1518 b'bundle2': b'HG20',
1519 1519 }
1520 1520 bundletype = btypes.get(bundletype)
1521 1521 if bundletype not in bundle2.bundletypes:
1522 1522 raise error.Abort(_(b'unknown bundle type specified with --type'))
1523 1523 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1524 1524
1525 1525
1526 1526 @command(b'debugignore', [], b'[FILE]')
1527 1527 def debugignore(ui, repo, *files, **opts):
1528 1528 """display the combined ignore pattern and information about ignored files
1529 1529
1530 1530 With no argument display the combined ignore pattern.
1531 1531
1532 1532 Given space-separated file names, shows if the given file is ignored and
1533 1533 if so, show the ignore rule (file and line number) that matched it.
1534 1534 """
1535 1535 ignore = repo.dirstate._ignore
1536 1536 if not files:
1537 1537 # Show all the patterns
1538 1538 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1539 1539 else:
1540 1540 m = scmutil.match(repo[None], pats=files)
1541 1541 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1542 1542 for f in m.files():
1543 1543 nf = util.normpath(f)
1544 1544 ignored = None
1545 1545 ignoredata = None
1546 1546 if nf != b'.':
1547 1547 if ignore(nf):
1548 1548 ignored = nf
1549 1549 ignoredata = repo.dirstate._ignorefileandline(nf)
1550 1550 else:
1551 1551 for p in pathutil.finddirs(nf):
1552 1552 if ignore(p):
1553 1553 ignored = p
1554 1554 ignoredata = repo.dirstate._ignorefileandline(p)
1555 1555 break
1556 1556 if ignored:
1557 1557 if ignored == nf:
1558 1558 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1559 1559 else:
1560 1560 ui.write(
1561 1561 _(
1562 1562 b"%s is ignored because of "
1563 1563 b"containing directory %s\n"
1564 1564 )
1565 1565 % (uipathfn(f), ignored)
1566 1566 )
1567 1567 ignorefile, lineno, line = ignoredata
1568 1568 ui.write(
1569 1569 _(b"(ignore rule in %s, line %d: '%s')\n")
1570 1570 % (ignorefile, lineno, line)
1571 1571 )
1572 1572 else:
1573 1573 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1574 1574
1575 1575
1576 1576 @command(
1577 1577 b'debugindex',
1578 1578 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1579 1579 _(b'-c|-m|FILE'),
1580 1580 )
1581 1581 def debugindex(ui, repo, file_=None, **opts):
1582 1582 """dump index data for a storage primitive"""
1583 1583 opts = pycompat.byteskwargs(opts)
1584 1584 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1585 1585
1586 1586 if ui.debugflag:
1587 1587 shortfn = hex
1588 1588 else:
1589 1589 shortfn = short
1590 1590
1591 1591 idlen = 12
1592 1592 for i in store:
1593 1593 idlen = len(shortfn(store.node(i)))
1594 1594 break
1595 1595
1596 1596 fm = ui.formatter(b'debugindex', opts)
1597 1597 fm.plain(
1598 1598 b' rev linkrev %s %s p2\n'
1599 1599 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1600 1600 )
1601 1601
1602 1602 for rev in store:
1603 1603 node = store.node(rev)
1604 1604 parents = store.parents(node)
1605 1605
1606 1606 fm.startitem()
1607 1607 fm.write(b'rev', b'%6d ', rev)
1608 1608 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1609 1609 fm.write(b'node', b'%s ', shortfn(node))
1610 1610 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1611 1611 fm.write(b'p2', b'%s', shortfn(parents[1]))
1612 1612 fm.plain(b'\n')
1613 1613
1614 1614 fm.end()
1615 1615
1616 1616
1617 1617 @command(
1618 1618 b'debugindexdot',
1619 1619 cmdutil.debugrevlogopts,
1620 1620 _(b'-c|-m|FILE'),
1621 1621 optionalrepo=True,
1622 1622 )
1623 1623 def debugindexdot(ui, repo, file_=None, **opts):
1624 1624 """dump an index DAG as a graphviz dot file"""
1625 1625 opts = pycompat.byteskwargs(opts)
1626 1626 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1627 1627 ui.writenoi18n(b"digraph G {\n")
1628 1628 for i in r:
1629 1629 node = r.node(i)
1630 1630 pp = r.parents(node)
1631 1631 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1632 1632 if pp[1] != nullid:
1633 1633 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1634 1634 ui.write(b"}\n")
1635 1635
1636 1636
1637 1637 @command(b'debugindexstats', [])
1638 1638 def debugindexstats(ui, repo):
1639 1639 """show stats related to the changelog index"""
1640 1640 repo.changelog.shortest(nullid, 1)
1641 1641 index = repo.changelog.index
1642 1642 if not util.safehasattr(index, b'stats'):
1643 1643 raise error.Abort(_(b'debugindexstats only works with native code'))
1644 1644 for k, v in sorted(index.stats().items()):
1645 1645 ui.write(b'%s: %d\n' % (k, v))
1646 1646
1647 1647
1648 1648 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1649 1649 def debuginstall(ui, **opts):
1650 1650 """test Mercurial installation
1651 1651
1652 1652 Returns 0 on success.
1653 1653 """
1654 1654 opts = pycompat.byteskwargs(opts)
1655 1655
1656 1656 problems = 0
1657 1657
1658 1658 fm = ui.formatter(b'debuginstall', opts)
1659 1659 fm.startitem()
1660 1660
1661 1661 # encoding might be unknown or wrong. don't translate these messages.
1662 1662 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1663 1663 err = None
1664 1664 try:
1665 1665 codecs.lookup(pycompat.sysstr(encoding.encoding))
1666 1666 except LookupError as inst:
1667 1667 err = stringutil.forcebytestr(inst)
1668 1668 problems += 1
1669 1669 fm.condwrite(
1670 1670 err,
1671 1671 b'encodingerror',
1672 1672 b" %s\n (check that your locale is properly set)\n",
1673 1673 err,
1674 1674 )
1675 1675
1676 1676 # Python
1677 1677 pythonlib = None
1678 1678 if util.safehasattr(os, '__file__'):
1679 1679 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1680 1680 elif getattr(sys, 'oxidized', False):
1681 1681 pythonlib = pycompat.sysexecutable
1682 1682
1683 1683 fm.write(
1684 1684 b'pythonexe',
1685 1685 _(b"checking Python executable (%s)\n"),
1686 1686 pycompat.sysexecutable or _(b"unknown"),
1687 1687 )
1688 1688 fm.write(
1689 1689 b'pythonimplementation',
1690 1690 _(b"checking Python implementation (%s)\n"),
1691 1691 pycompat.sysbytes(platform.python_implementation()),
1692 1692 )
1693 1693 fm.write(
1694 1694 b'pythonver',
1695 1695 _(b"checking Python version (%s)\n"),
1696 1696 (b"%d.%d.%d" % sys.version_info[:3]),
1697 1697 )
1698 1698 fm.write(
1699 1699 b'pythonlib',
1700 1700 _(b"checking Python lib (%s)...\n"),
1701 1701 pythonlib or _(b"unknown"),
1702 1702 )
1703 1703
1704 1704 try:
1705 1705 from . import rustext
1706 1706
1707 1707 rustext.__doc__ # trigger lazy import
1708 1708 except ImportError:
1709 1709 rustext = None
1710 1710
1711 1711 security = set(sslutil.supportedprotocols)
1712 1712 if sslutil.hassni:
1713 1713 security.add(b'sni')
1714 1714
1715 1715 fm.write(
1716 1716 b'pythonsecurity',
1717 1717 _(b"checking Python security support (%s)\n"),
1718 1718 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1719 1719 )
1720 1720
1721 1721 # These are warnings, not errors. So don't increment problem count. This
1722 1722 # may change in the future.
1723 1723 if b'tls1.2' not in security:
1724 1724 fm.plain(
1725 1725 _(
1726 1726 b' TLS 1.2 not supported by Python install; '
1727 1727 b'network connections lack modern security\n'
1728 1728 )
1729 1729 )
1730 1730 if b'sni' not in security:
1731 1731 fm.plain(
1732 1732 _(
1733 1733 b' SNI not supported by Python install; may have '
1734 1734 b'connectivity issues with some servers\n'
1735 1735 )
1736 1736 )
1737 1737
1738 1738 fm.plain(
1739 1739 _(
1740 1740 b"checking Rust extensions (%s)\n"
1741 1741 % (b'missing' if rustext is None else b'installed')
1742 1742 ),
1743 1743 )
1744 1744
1745 1745 # TODO print CA cert info
1746 1746
1747 1747 # hg version
1748 1748 hgver = util.version()
1749 1749 fm.write(
1750 1750 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1751 1751 )
1752 1752 fm.write(
1753 1753 b'hgverextra',
1754 1754 _(b"checking Mercurial custom build (%s)\n"),
1755 1755 b'+'.join(hgver.split(b'+')[1:]),
1756 1756 )
1757 1757
1758 1758 # compiled modules
1759 1759 hgmodules = None
1760 1760 if util.safehasattr(sys.modules[__name__], '__file__'):
1761 1761 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1762 1762 elif getattr(sys, 'oxidized', False):
1763 1763 hgmodules = pycompat.sysexecutable
1764 1764
1765 1765 fm.write(
1766 1766 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1767 1767 )
1768 1768 fm.write(
1769 1769 b'hgmodules',
1770 1770 _(b"checking installed modules (%s)...\n"),
1771 1771 hgmodules or _(b"unknown"),
1772 1772 )
1773 1773
1774 1774 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1775 1775 rustext = rustandc # for now, that's the only case
1776 1776 cext = policy.policy in (b'c', b'allow') or rustandc
1777 1777 nopure = cext or rustext
1778 1778 if nopure:
1779 1779 err = None
1780 1780 try:
1781 1781 if cext:
1782 1782 from .cext import ( # pytype: disable=import-error
1783 1783 base85,
1784 1784 bdiff,
1785 1785 mpatch,
1786 1786 osutil,
1787 1787 )
1788 1788
1789 1789 # quiet pyflakes
1790 1790 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1791 1791 if rustext:
1792 1792 from .rustext import ( # pytype: disable=import-error
1793 1793 ancestor,
1794 1794 dirstate,
1795 1795 )
1796 1796
1797 1797 dir(ancestor), dir(dirstate) # quiet pyflakes
1798 1798 except Exception as inst:
1799 1799 err = stringutil.forcebytestr(inst)
1800 1800 problems += 1
1801 1801 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1802 1802
1803 1803 compengines = util.compengines._engines.values()
1804 1804 fm.write(
1805 1805 b'compengines',
1806 1806 _(b'checking registered compression engines (%s)\n'),
1807 1807 fm.formatlist(
1808 1808 sorted(e.name() for e in compengines),
1809 1809 name=b'compengine',
1810 1810 fmt=b'%s',
1811 1811 sep=b', ',
1812 1812 ),
1813 1813 )
1814 1814 fm.write(
1815 1815 b'compenginesavail',
1816 1816 _(b'checking available compression engines (%s)\n'),
1817 1817 fm.formatlist(
1818 1818 sorted(e.name() for e in compengines if e.available()),
1819 1819 name=b'compengine',
1820 1820 fmt=b'%s',
1821 1821 sep=b', ',
1822 1822 ),
1823 1823 )
1824 1824 wirecompengines = compression.compengines.supportedwireengines(
1825 1825 compression.SERVERROLE
1826 1826 )
1827 1827 fm.write(
1828 1828 b'compenginesserver',
1829 1829 _(
1830 1830 b'checking available compression engines '
1831 1831 b'for wire protocol (%s)\n'
1832 1832 ),
1833 1833 fm.formatlist(
1834 1834 [e.name() for e in wirecompengines if e.wireprotosupport()],
1835 1835 name=b'compengine',
1836 1836 fmt=b'%s',
1837 1837 sep=b', ',
1838 1838 ),
1839 1839 )
1840 1840 re2 = b'missing'
1841 1841 if util._re2:
1842 1842 re2 = b'available'
1843 1843 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1844 1844 fm.data(re2=bool(util._re2))
1845 1845
1846 1846 # templates
1847 1847 p = templater.templatedir()
1848 1848 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1849 1849 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1850 1850 if p:
1851 1851 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1852 1852 if m:
1853 1853 # template found, check if it is working
1854 1854 err = None
1855 1855 try:
1856 1856 templater.templater.frommapfile(m)
1857 1857 except Exception as inst:
1858 1858 err = stringutil.forcebytestr(inst)
1859 1859 p = None
1860 1860 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1861 1861 else:
1862 1862 p = None
1863 1863 fm.condwrite(
1864 1864 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1865 1865 )
1866 1866 fm.condwrite(
1867 1867 not m,
1868 1868 b'defaulttemplatenotfound',
1869 1869 _(b" template '%s' not found\n"),
1870 1870 b"default",
1871 1871 )
1872 1872 if not p:
1873 1873 problems += 1
1874 1874 fm.condwrite(
1875 1875 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1876 1876 )
1877 1877
1878 1878 # editor
1879 1879 editor = ui.geteditor()
1880 1880 editor = util.expandpath(editor)
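# the configured editor may include arguments; only the executable is checked for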
1881 1881 editorbin = procutil.shellsplit(editor)[0]
1882 1882 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1883 1883 cmdpath = procutil.findexe(editorbin)
1884 1884 fm.condwrite(
1885 1885 not cmdpath and editor == b'vi',
1886 1886 b'vinotfound',
1887 1887 _(
1888 1888 b" No commit editor set and can't find %s in PATH\n"
1889 1889 b" (specify a commit editor in your configuration"
1890 1890 b" file)\n"
1891 1891 ),
1892 1892 not cmdpath and editor == b'vi' and editorbin,
1893 1893 )
1894 1894 fm.condwrite(
1895 1895 not cmdpath and editor != b'vi',
1896 1896 b'editornotfound',
1897 1897 _(
1898 1898 b" Can't find editor '%s' in PATH\n"
1899 1899 b" (specify a commit editor in your configuration"
1900 1900 b" file)\n"
1901 1901 ),
1902 1902 not cmdpath and editorbin,
1903 1903 )
1904 1904 if not cmdpath and editor != b'vi':
1905 1905 problems += 1
1906 1906
1907 1907 # check username
1908 1908 username = None
1909 1909 err = None
1910 1910 try:
1911 1911 username = ui.username()
1912 1912 except error.Abort as e:
1913 1913 err = e.message
1914 1914 problems += 1
1915 1915
1916 1916 fm.condwrite(
1917 1917 username, b'username', _(b"checking username (%s)\n"), username
1918 1918 )
1919 1919 fm.condwrite(
1920 1920 err,
1921 1921 b'usernameerror',
1922 1922 _(
1923 1923 b"checking username...\n %s\n"
1924 1924 b" (specify a username in your configuration file)\n"
1925 1925 ),
1926 1926 err,
1927 1927 )
1928 1928
1929 1929 for name, mod in extensions.extensions():
1930 1930 handler = getattr(mod, 'debuginstall', None)
1931 1931 if handler is not None:
1932 1932 problems += handler(ui, fm)
1933 1933
1934 1934 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1935 1935 if not problems:
1936 1936 fm.data(problems=problems)
1937 1937 fm.condwrite(
1938 1938 problems,
1939 1939 b'problems',
1940 1940 _(b"%d problems detected, please check your install!\n"),
1941 1941 problems,
1942 1942 )
1943 1943 fm.end()
1944 1944
1945 1945 return problems
1946 1946
1947 1947
1948 1948 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1949 1949 def debugknown(ui, repopath, *ids, **opts):
1950 1950 """test whether node ids are known to a repo
1951 1951
1952 1952 Every ID must be a full-length hex node id string. Returns a list of 0s
1953 1953 and 1s indicating unknown/known.
1954 1954 """
1955 1955 opts = pycompat.byteskwargs(opts)
1956 1956 repo = hg.peer(ui, opts, repopath)
1957 1957 if not repo.capable(b'known'):
1958 1958 raise error.Abort(b"known() not supported by target repository")
1959 1959 flags = repo.known([bin(s) for s in ids])
1960 1960 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1961 1961
1962 1962
1963 1963 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1964 1964 def debuglabelcomplete(ui, repo, *args):
1965 1965 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1966 1966 debugnamecomplete(ui, repo, *args)
1967 1967
1968 1968
1969 1969 @command(
1970 1970 b'debuglocks',
1971 1971 [
1972 1972 (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
1973 1973 (
1974 1974 b'W',
1975 1975 b'force-free-wlock',
1976 1976 None,
1977 1977 _(b'free the working state lock (DANGEROUS)'),
1978 1978 ),
1979 1979 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1980 1980 (
1981 1981 b'S',
1982 1982 b'set-wlock',
1983 1983 None,
1984 1984 _(b'set the working state lock until stopped'),
1985 1985 ),
1986 1986 ],
1987 1987 _(b'[OPTION]...'),
1988 1988 )
1989 1989 def debuglocks(ui, repo, **opts):
1990 1990 """show or modify state of locks
1991 1991
1992 1992 By default, this command will show which locks are held. This
1993 1993 includes the user and process holding the lock, the amount of time
1994 1994 the lock has been held, and the machine name where the process is
1995 1995 running if it's not local.
1996 1996
1997 1997 Locks protect the integrity of Mercurial's data, so should be
1998 1998 treated with care. System crashes or other interruptions may cause
1999 1999 locks to not be properly released, though Mercurial will usually
2000 2000 detect and remove such stale locks automatically.
2001 2001
2002 2002 However, detecting stale locks may not always be possible (for
2003 2003 instance, on a shared filesystem). Removing locks may also be
2004 2004 blocked by filesystem permissions.
2005 2005
2006 2006 Setting a lock will prevent other commands from changing the data.
2007 2007 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
2008 2008 Any lock set this way is released when the command exits.
2009 2009
2010 2010 Returns 0 if no locks are held.
2011 2011
2012 2012 """
2013 2013
2014 2014 if opts.get('force_free_lock'):
2015 2015 repo.svfs.unlink(b'lock')
2016 2016 if opts.get('force_free_wlock'):
2017 2017 repo.vfs.unlink(b'wlock')
2018 2018 if opts.get('force_free_lock') or opts.get('force_free_wlock'):
2019 2019 return 0
2020 2020
2021 2021 locks = []
2022 2022 try:
2023 2023 if opts.get('set_wlock'):
2024 2024 try:
2025 2025 locks.append(repo.wlock(False))
2026 2026 except error.LockHeld:
2027 2027 raise error.Abort(_(b'wlock is already held'))
2028 2028 if opts.get('set_lock'):
2029 2029 try:
2030 2030 locks.append(repo.lock(False))
2031 2031 except error.LockHeld:
2032 2032 raise error.Abort(_(b'lock is already held'))
2033 2033 if len(locks):
2034 2034 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
2035 2035 return 0
2036 2036 finally:
2037 2037 release(*locks)
2038 2038
2039 2039 now = time.time()
2040 2040 held = 0
2041 2041
2042 2042 def report(vfs, name, method):
2043 2043 # this causes stale locks to get reaped for more accurate reporting
2044 2044 try:
2045 2045 l = method(False)
2046 2046 except error.LockHeld:
2047 2047 l = None
2048 2048
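# if the lock was acquired it was free (or stale and just reaped); release it immediately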
2049 2049 if l:
2050 2050 l.release()
2051 2051 else:
2052 2052 try:
2053 2053 st = vfs.lstat(name)
2054 2054 age = now - st[stat.ST_MTIME]
2055 2055 user = util.username(st.st_uid)
2056 2056 locker = vfs.readlock(name)
2057 2057 if b":" in locker:
2058 2058 host, pid = locker.split(b':')
2059 2059 if host == socket.gethostname():
2060 2060 locker = b'user %s, process %s' % (user or b'None', pid)
2061 2061 else:
2062 2062 locker = b'user %s, process %s, host %s' % (
2063 2063 user or b'None',
2064 2064 pid,
2065 2065 host,
2066 2066 )
2067 2067 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
2068 2068 return 1
2069 2069 except OSError as e:
2070 2070 if e.errno != errno.ENOENT:
2071 2071 raise
2072 2072
2073 2073 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
2074 2074 return 0
2075 2075
2076 2076 held += report(repo.svfs, b"lock", repo.lock)
2077 2077 held += report(repo.vfs, b"wlock", repo.wlock)
2078 2078
2079 2079 return held
2080 2080
2081 2081
2082 2082 @command(
2083 2083 b'debugmanifestfulltextcache',
2084 2084 [
2085 2085 (b'', b'clear', False, _(b'clear the cache')),
2086 2086 (
2087 2087 b'a',
2088 2088 b'add',
2089 2089 [],
2090 2090 _(b'add the given manifest nodes to the cache'),
2091 2091 _(b'NODE'),
2092 2092 ),
2093 2093 ],
2094 2094 b'',
2095 2095 )
2096 2096 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
2097 2097 """show, clear or amend the contents of the manifest fulltext cache"""
2098 2098
2099 2099 def getcache():
2100 2100 r = repo.manifestlog.getstorage(b'')
2101 2101 try:
2102 2102 return r._fulltextcache
2103 2103 except AttributeError:
2104 2104 msg = _(
2105 2105 b"Current revlog implementation doesn't appear to have a "
2106 2106 b"manifest fulltext cache\n"
2107 2107 )
2108 2108 raise error.Abort(msg)
2109 2109
2110 2110 if opts.get('clear'):
2111 2111 with repo.wlock():
2112 2112 cache = getcache()
2113 2113 cache.clear(clear_persisted_data=True)
2114 2114 return
2115 2115
2116 2116 if add:
2117 2117 with repo.wlock():
2118 2118 m = repo.manifestlog
2119 2119 store = m.getstorage(b'')
2120 2120 for n in add:
2121 2121 try:
2122 2122 manifest = m[store.lookup(n)]
2123 2123 except error.LookupError as e:
2124 2124 raise error.Abort(e, hint=b"Check your manifest node id")
2125 2125 manifest.read() # stores revision in cache too
2126 2126 return
2127 2127
2128 2128 cache = getcache()
2129 2129 if not len(cache):
2130 2130 ui.write(_(b'cache empty\n'))
2131 2131 else:
2132 2132 ui.write(
2133 2133 _(
2134 2134 b'cache contains %d manifest entries, in order of most to '
2135 2135 b'least recent:\n'
2136 2136 )
2137 2137 % (len(cache),)
2138 2138 )
2139 2139 totalsize = 0
2140 2140 for nodeid in cache:
2141 2141 # Use cache.peek to not update the LRU order
2142 2142 data = cache.peek(nodeid)
2143 2143 size = len(data)
2144 2144 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
2145 2145 ui.write(
2146 2146 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
2147 2147 )
2148 2148 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
2149 2149 ui.write(
2150 2150 _(b'total cache data size %s, on-disk %s\n')
2151 2151 % (util.bytecount(totalsize), util.bytecount(ondisk))
2152 2152 )
2153 2153
2154 2154
2155 2155 @command(b'debugmergestate', [] + cmdutil.templateopts, b'')
2156 2156 def debugmergestate(ui, repo, *args, **opts):
2157 2157 """print merge state
2158 2158
2159 2159 Use --verbose to print out information about whether v1 or v2 merge state
2160 2160 was chosen."""
2161 2161
2162 2162 if ui.verbose:
2163 2163 ms = mergestatemod.mergestate(repo)
2164 2164
2165 2165 # sort so that reasonable information is on top
2166 2166 v1records = ms._readrecordsv1()
2167 2167 v2records = ms._readrecordsv2()
2168 2168
2169 2169 if not v1records and not v2records:
2170 2170 pass
2171 2171 elif not v2records:
2172 2172 ui.writenoi18n(b'no version 2 merge state\n')
2173 2173 elif ms._v1v2match(v1records, v2records):
2174 2174 ui.writenoi18n(b'v1 and v2 states match: using v2\n')
2175 2175 else:
2176 2176 ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')
2177 2177
2178 2178 opts = pycompat.byteskwargs(opts)
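# default template: render the merged commits, then the per-file merge records, then any extras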
2179 2179 if not opts[b'template']:
2180 2180 opts[b'template'] = (
2181 2181 b'{if(commits, "", "no merge state found\n")}'
2182 2182 b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
2183 2183 b'{files % "file: {path} (state \\"{state}\\")\n'
2184 2184 b'{if(local_path, "'
2185 2185 b' local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
2186 2186 b' ancestor path: {ancestor_path} (node {ancestor_node})\n'
2187 2187 b' other path: {other_path} (node {other_node})\n'
2188 2188 b'")}'
2189 2189 b'{if(rename_side, "'
2190 2190 b' rename side: {rename_side}\n'
2191 2191 b' renamed path: {renamed_path}\n'
2192 2192 b'")}'
2193 2193 b'{extras % " extra: {key} = {value}\n"}'
2194 2194 b'"}'
2195 2195 b'{extras % "extra: {file} ({key} = {value})\n"}'
2196 2196 )
2197 2197
2198 2198 ms = mergestatemod.mergestate.read(repo)
2199 2199
2200 2200 fm = ui.formatter(b'debugmergestate', opts)
2201 2201 fm.startitem()
2202 2202
2203 2203 fm_commits = fm.nested(b'commits')
2204 2204 if ms.active():
2205 2205 for name, node, label_index in (
2206 2206 (b'local', ms.local, 0),
2207 2207 (b'other', ms.other, 1),
2208 2208 ):
2209 2209 fm_commits.startitem()
2210 2210 fm_commits.data(name=name)
2211 2211 fm_commits.data(node=hex(node))
2212 2212 if ms._labels and len(ms._labels) > label_index:
2213 2213 fm_commits.data(label=ms._labels[label_index])
2214 2214 fm_commits.end()
2215 2215
2216 2216 fm_files = fm.nested(b'files')
2217 2217 if ms.active():
2218 2218 for f in ms:
2219 2219 fm_files.startitem()
2220 2220 fm_files.data(path=f)
2221 2221 state = ms._state[f]
2222 2222 fm_files.data(state=state[0])
2223 2223 if state[0] in (
2224 2224 mergestatemod.MERGE_RECORD_UNRESOLVED,
2225 2225 mergestatemod.MERGE_RECORD_RESOLVED,
2226 2226 ):
2227 2227 fm_files.data(local_key=state[1])
2228 2228 fm_files.data(local_path=state[2])
2229 2229 fm_files.data(ancestor_path=state[3])
2230 2230 fm_files.data(ancestor_node=state[4])
2231 2231 fm_files.data(other_path=state[5])
2232 2232 fm_files.data(other_node=state[6])
2233 2233 fm_files.data(local_flags=state[7])
2234 2234 elif state[0] in (
2235 2235 mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
2236 2236 mergestatemod.MERGE_RECORD_RESOLVED_PATH,
2237 2237 ):
2238 2238 fm_files.data(renamed_path=state[1])
2239 2239 fm_files.data(rename_side=state[2])
2240 2240 fm_extras = fm_files.nested(b'extras')
2241 2241 for k, v in sorted(ms.extras(f).items()):
2242 2242 fm_extras.startitem()
2243 2243 fm_extras.data(key=k)
2244 2244 fm_extras.data(value=v)
2245 2245 fm_extras.end()
2246 2246
2247 2247 fm_files.end()
2248 2248
2249 2249 fm_extras = fm.nested(b'extras')
2250 2250 for f, d in sorted(pycompat.iteritems(ms.allextras())):
2251 2251 if f in ms:
2252 2252 # If the file is in the mergestate, we have already processed its extras
2253 2253 continue
2254 2254 for k, v in pycompat.iteritems(d):
2255 2255 fm_extras.startitem()
2256 2256 fm_extras.data(file=f)
2257 2257 fm_extras.data(key=k)
2258 2258 fm_extras.data(value=v)
2259 2259 fm_extras.end()
2260 2260
2261 2261 fm.end()
2262 2262
2263 2263
2264 2264 @command(b'debugnamecomplete', [], _(b'NAME...'))
2265 2265 def debugnamecomplete(ui, repo, *args):
2266 2266 '''complete "names" - tags, open branch names, bookmark names'''
2267 2267
2268 2268 names = set()
2269 2269 # since we previously only listed open branches, we will handle that
2270 2270 # specially (after this for loop)
2271 2271 for name, ns in pycompat.iteritems(repo.names):
2272 2272 if name != b'branches':
2273 2273 names.update(ns.listnames(repo))
2274 2274 names.update(
2275 2275 tag
2276 2276 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2277 2277 if not closed
2278 2278 )
2279 2279 completions = set()
2280 2280 if not args:
2281 2281 args = [b'']
2282 2282 for a in args:
2283 2283 completions.update(n for n in names if n.startswith(a))
2284 2284 ui.write(b'\n'.join(sorted(completions)))
2285 2285 ui.write(b'\n')
2286 2286
2287 2287
2288 2288 @command(
2289 2289 b'debugnodemap',
2290 2290 [
2291 2291 (
2292 2292 b'',
2293 2293 b'dump-new',
2294 2294 False,
2295 2295 _(b'write a (new) persistent binary nodemap on stdout'),
2296 2296 ),
2297 2297 (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
2298 2298 (
2299 2299 b'',
2300 2300 b'check',
2301 2301 False,
2302 2302 _(b'check that the on-disk data are correct'),
2303 2303 ),
2304 2304 (
2305 2305 b'',
2306 2306 b'metadata',
2307 2307 False,
2308 2308 _(b'display the on-disk metadata for the nodemap'),
2309 2309 ),
2310 2310 ],
2311 2311 )
2312 2312 def debugnodemap(ui, repo, **opts):
2313 2313 """write and inspect on disk nodemap"""
2314 2314 if opts['dump_new']:
2315 2315 unfi = repo.unfiltered()
2316 2316 cl = unfi.changelog
2317 2317 if util.safehasattr(cl.index, "nodemap_data_all"):
2318 2318 data = cl.index.nodemap_data_all()
2319 2319 else:
2320 2320 data = nodemap.persistent_data(cl.index)
2321 2321 ui.write(data)
2322 2322 elif opts['dump_disk']:
2323 2323 unfi = repo.unfiltered()
2324 2324 cl = unfi.changelog
2325 2325 nm_data = nodemap.persisted_data(cl)
2326 2326 if nm_data is not None:
2327 2327 docket, data = nm_data
2328 2328 ui.write(data[:])
2329 2329 elif opts['check']:
2330 2330 unfi = repo.unfiltered()
2331 2331 cl = unfi.changelog
2332 2332 nm_data = nodemap.persisted_data(cl)
2333 2333 if nm_data is not None:
2334 2334 docket, data = nm_data
2335 2335 return nodemap.check_data(ui, cl.index, data)
2336 2336 elif opts['metadata']:
2337 2337 unfi = repo.unfiltered()
2338 2338 cl = unfi.changelog
2339 2339 nm_data = nodemap.persisted_data(cl)
2340 2340 if nm_data is not None:
2341 2341 docket, data = nm_data
2342 2342 ui.write((b"uid: %s\n") % docket.uid)
2343 2343 ui.write((b"tip-rev: %d\n") % docket.tip_rev)
2344 2344 ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
2345 2345 ui.write((b"data-length: %d\n") % docket.data_length)
2346 2346 ui.write((b"data-unused: %d\n") % docket.data_unused)
2347 2347 unused_perc = docket.data_unused * 100.0 / docket.data_length
2348 2348 ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2349 2349
2350 2350
2351 2351 @command(
2352 2352 b'debugobsolete',
2353 2353 [
2354 2354 (b'', b'flags', 0, _(b'markers flag')),
2355 2355 (
2356 2356 b'',
2357 2357 b'record-parents',
2358 2358 False,
2359 2359 _(b'record parent information for the precursor'),
2360 2360 ),
2361 2361 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2362 2362 (
2363 2363 b'',
2364 2364 b'exclusive',
2365 2365 False,
2366 2366 _(b'restrict display to markers only relevant to REV'),
2367 2367 ),
2368 2368 (b'', b'index', False, _(b'display index of the marker')),
2369 2369 (b'', b'delete', [], _(b'delete markers specified by indices')),
2370 2370 ]
2371 2371 + cmdutil.commitopts2
2372 2372 + cmdutil.formatteropts,
2373 2373 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2374 2374 )
2375 2375 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2376 2376 """create arbitrary obsolete marker
2377 2377
2378 2378 With no arguments, displays the list of obsolescence markers."""
2379 2379
2380 2380 opts = pycompat.byteskwargs(opts)
2381 2381
2382 2382 def parsenodeid(s):
2383 2383 try:
2384 2384 # We do not use revsingle/revrange functions here to accept
2385 2385 # arbitrary node identifiers, possibly not present in the
2386 2386 # local repository.
2387 2387 n = bin(s)
2388 2388 if len(n) != len(nullid):
2389 2389 raise TypeError()
2390 2390 return n
2391 2391 except TypeError:
2392 2392 raise error.InputError(
2393 2393 b'changeset references must be full hexadecimal '
2394 2394 b'node identifiers'
2395 2395 )
2396 2396
2397 2397 if opts.get(b'delete'):
2398 2398 indices = []
2399 2399 for v in opts.get(b'delete'):
2400 2400 try:
2401 2401 indices.append(int(v))
2402 2402 except ValueError:
2403 2403 raise error.InputError(
2404 2404 _(b'invalid index value: %r') % v,
2405 2405 hint=_(b'use integers for indices'),
2406 2406 )
2407 2407
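# deleting markers rewrites the obsstore outside the normal transaction machinery, so refuse while a transaction is open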
2408 2408 if repo.currenttransaction():
2409 2409 raise error.Abort(
2410 2410 _(b'cannot delete obsmarkers in the middle of a transaction')
2411 2411 )
2412 2412
2413 2413 with repo.lock():
2414 2414 n = repair.deleteobsmarkers(repo.obsstore, indices)
2415 2415 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2416 2416
2417 2417 return
2418 2418
2419 2419 if precursor is not None:
2420 2420 if opts[b'rev']:
2421 2421 raise error.InputError(
2422 2422 b'cannot select revision when creating marker'
2423 2423 )
2424 2424 metadata = {}
2425 2425 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2426 2426 succs = tuple(parsenodeid(succ) for succ in successors)
2427 2427 l = repo.lock()
2428 2428 try:
2429 2429 tr = repo.transaction(b'debugobsolete')
2430 2430 try:
2431 2431 date = opts.get(b'date')
2432 2432 if date:
2433 2433 date = dateutil.parsedate(date)
2434 2434 else:
2435 2435 date = None
2436 2436 prec = parsenodeid(precursor)
2437 2437 parents = None
2438 2438 if opts[b'record_parents']:
2439 2439 if prec not in repo.unfiltered():
2440 2440 raise error.Abort(
2441 2441 b'cannot use --record-parents on '
2442 2442 b'unknown changesets'
2443 2443 )
2444 2444 parents = repo.unfiltered()[prec].parents()
2445 2445 parents = tuple(p.node() for p in parents)
2446 2446 repo.obsstore.create(
2447 2447 tr,
2448 2448 prec,
2449 2449 succs,
2450 2450 opts[b'flags'],
2451 2451 parents=parents,
2452 2452 date=date,
2453 2453 metadata=metadata,
2454 2454 ui=ui,
2455 2455 )
2456 2456 tr.close()
2457 2457 except ValueError as exc:
2458 2458 raise error.Abort(
2459 2459 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2460 2460 )
2461 2461 finally:
2462 2462 tr.release()
2463 2463 finally:
2464 2464 l.release()
2465 2465 else:
2466 2466 if opts[b'rev']:
2467 2467 revs = scmutil.revrange(repo, opts[b'rev'])
2468 2468 nodes = [repo[r].node() for r in revs]
2469 2469 markers = list(
2470 2470 obsutil.getmarkers(
2471 2471 repo, nodes=nodes, exclusive=opts[b'exclusive']
2472 2472 )
2473 2473 )
2474 2474 markers.sort(key=lambda x: x._data)
2475 2475 else:
2476 2476 markers = obsutil.getmarkers(repo)
2477 2477
2478 2478 markerstoiter = markers
2479 2479 isrelevant = lambda m: True
2480 2480 if opts.get(b'rev') and opts.get(b'index'):
2481 2481 markerstoiter = obsutil.getmarkers(repo)
2482 2482 markerset = set(markers)
2483 2483 isrelevant = lambda m: m in markerset
2484 2484
2485 2485 fm = ui.formatter(b'debugobsolete', opts)
2486 2486 for i, m in enumerate(markerstoiter):
2487 2487 if not isrelevant(m):
2488 2488 # marker can be irrelevant when we're iterating over a set
2489 2489 # of markers (markerstoiter) which is bigger than the set
2490 2490 # of markers we want to display (markers)
2491 2491 # this can happen if both --index and --rev options are
2492 2492 # provided and thus we need to iterate over all of the markers
2493 2493 # to get the correct indices, but only display the ones that
2494 2494 # are relevant to --rev value
2495 2495 continue
2496 2496 fm.startitem()
2497 2497 ind = i if opts.get(b'index') else None
2498 2498 cmdutil.showmarker(fm, m, index=ind)
2499 2499 fm.end()
2500 2500
2501 2501
2502 2502 @command(
2503 2503 b'debugp1copies',
2504 2504 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2505 2505 _(b'[-r REV]'),
2506 2506 )
2507 2507 def debugp1copies(ui, repo, **opts):
2508 2508 """dump copy information compared to p1"""
2509 2509
2510 2510 opts = pycompat.byteskwargs(opts)
2511 2511 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2512 2512 for dst, src in ctx.p1copies().items():
2513 2513 ui.write(b'%s -> %s\n' % (src, dst))
2514 2514
2515 2515
2516 2516 @command(
2517 2517 b'debugp2copies',
2518 2518 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2519 2519 _(b'[-r REV]'),
2520 2520 )
2521 2521 def debugp2copies(ui, repo, **opts):
2522 2522 """dump copy information compared to p2"""
2523 2523
2524 2524 opts = pycompat.byteskwargs(opts)
2525 2525 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2526 2526 for dst, src in ctx.p2copies().items():
2527 2527 ui.write(b'%s -> %s\n' % (src, dst))
2528 2528
2529 2529
2530 2530 @command(
2531 2531 b'debugpathcomplete',
2532 2532 [
2533 2533 (b'f', b'full', None, _(b'complete an entire path')),
2534 2534 (b'n', b'normal', None, _(b'show only normal files')),
2535 2535 (b'a', b'added', None, _(b'show only added files')),
2536 2536 (b'r', b'removed', None, _(b'show only removed files')),
2537 2537 ],
2538 2538 _(b'FILESPEC...'),
2539 2539 )
2540 2540 def debugpathcomplete(ui, repo, *specs, **opts):
2541 2541 """complete part or all of a tracked path
2542 2542
2543 2543 This command supports shells that offer path name completion. It
2544 2544 currently completes only files already known to the dirstate.
2545 2545
2546 2546 Completion extends only to the next path segment unless
2547 2547 --full is specified, in which case entire paths are used."""
2548 2548
2549 2549 def complete(path, acceptable):
2550 2550 dirstate = repo.dirstate
2551 2551 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2552 2552 rootdir = repo.root + pycompat.ossep
2553 2553 if spec != repo.root and not spec.startswith(rootdir):
2554 2554 return [], []
2555 2555 if os.path.isdir(spec):
2556 2556 spec += b'/'
2557 2557 spec = spec[len(rootdir) :]
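# dirstate paths always use '/', so normalize the spec accordingly on platforms with a different separator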
2558 2558 fixpaths = pycompat.ossep != b'/'
2559 2559 if fixpaths:
2560 2560 spec = spec.replace(pycompat.ossep, b'/')
2561 2561 speclen = len(spec)
2562 2562 fullpaths = opts['full']
2563 2563 files, dirs = set(), set()
2564 2564 adddir, addfile = dirs.add, files.add
2565 2565 for f, st in pycompat.iteritems(dirstate):
2566 2566 if f.startswith(spec) and st[0] in acceptable:
2567 2567 if fixpaths:
2568 2568 f = f.replace(b'/', pycompat.ossep)
2569 2569 if fullpaths:
2570 2570 addfile(f)
2571 2571 continue
2572 2572 s = f.find(pycompat.ossep, speclen)
2573 2573 if s >= 0:
2574 2574 adddir(f[:s])
2575 2575 else:
2576 2576 addfile(f)
2577 2577 return files, dirs
2578 2578
2579 2579 acceptable = b''
2580 2580 if opts['normal']:
2581 2581 acceptable += b'nm'
2582 2582 if opts['added']:
2583 2583 acceptable += b'a'
2584 2584 if opts['removed']:
2585 2585 acceptable += b'r'
2586 2586 cwd = repo.getcwd()
2587 2587 if not specs:
2588 2588 specs = [b'.']
2589 2589
2590 2590 files, dirs = set(), set()
2591 2591 for spec in specs:
2592 2592 f, d = complete(spec, acceptable or b'nmar')
2593 2593 files.update(f)
2594 2594 dirs.update(d)
2595 2595 files.update(dirs)
2596 2596 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2597 2597 ui.write(b'\n')
2598 2598
2599 2599
2600 2600 @command(
2601 2601 b'debugpathcopies',
2602 2602 cmdutil.walkopts,
2603 2603 b'hg debugpathcopies REV1 REV2 [FILE]',
2604 2604 inferrepo=True,
2605 2605 )
2606 2606 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2607 2607 """show copies between two revisions"""
2608 2608 ctx1 = scmutil.revsingle(repo, rev1)
2609 2609 ctx2 = scmutil.revsingle(repo, rev2)
2610 2610 m = scmutil.match(ctx1, pats, opts)
2611 2611 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2612 2612 ui.write(b'%s -> %s\n' % (src, dst))
2613 2613
2614 2614
2615 2615 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2616 2616 def debugpeer(ui, path):
2617 2617 """establish a connection to a peer repository"""
2618 2618 # Always enable peer request logging. Requires --debug to display
2619 2619 # though.
2620 2620 overrides = {
2621 2621 (b'devel', b'debug.peer-request'): True,
2622 2622 }
2623 2623
2624 2624 with ui.configoverride(overrides):
2625 2625 peer = hg.peer(ui, {}, path)
2626 2626
2627 2627 try:
2628 2628 local = peer.local() is not None
2629 2629 canpush = peer.canpush()
2630 2630
2631 2631 ui.write(_(b'url: %s\n') % peer.url())
2632 2632 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2633 2633 ui.write(
2634 2634 _(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no'))
2635 2635 )
2636 2636 finally:
2637 2637 peer.close()
2638 2638
2639 2639
2640 2640 @command(
2641 2641 b'debugpickmergetool',
2642 2642 [
2643 2643 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2644 2644 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2645 2645 ]
2646 2646 + cmdutil.walkopts
2647 2647 + cmdutil.mergetoolopts,
2648 2648 _(b'[PATTERN]...'),
2649 2649 inferrepo=True,
2650 2650 )
2651 2651 def debugpickmergetool(ui, repo, *pats, **opts):
2652 2652 """examine which merge tool is chosen for specified file
2653 2653
2654 2654 As described in :hg:`help merge-tools`, Mercurial examines
2655 2655 configurations below in this order to decide which merge tool is
2656 2656 chosen for specified file.
2657 2657
2658 2658 1. ``--tool`` option
2659 2659 2. ``HGMERGE`` environment variable
2660 2660 3. configurations in ``merge-patterns`` section
2661 2661 4. configuration of ``ui.merge``
2662 2662 5. configurations in ``merge-tools`` section
2663 2663 6. ``hgmerge`` tool (for historical reasons only)
2664 2664 7. default tool for fallback (``:merge`` or ``:prompt``)
2665 2665
2666 2666 This command writes out the examination result in the style below::
2667 2667
2668 2668 FILE = MERGETOOL
2669 2669
2670 2670 By default, all files known in the first parent context of the
2671 2671 working directory are examined. Use file patterns and/or -I/-X
2672 2672 options to limit the target files. -r/--rev is also useful to examine
2673 2673 files in another context without actually updating to it.
2674 2674
2675 2675 With --debug, this command also shows the warning messages emitted
2676 2676 while matching against ``merge-patterns`` and so on. It is recommended
2677 2677 to use this option with explicit file patterns and/or -I/-X options,
2678 2678 because it increases the amount of output per file according to the
2679 2679 configurations in hgrc.
2680 2680
2681 2681 With -v/--verbose, this command first shows the configurations
2682 2682 below (only those that are specified).
2683 2683
2684 2684 - ``--tool`` option
2685 2685 - ``HGMERGE`` environment variable
2686 2686 - configuration of ``ui.merge``
2687 2687
2688 2688 If a merge tool is chosen before matching against
2689 2689 ``merge-patterns``, this command can't show any helpful
2690 2690 information, even with --debug. In such a case, the information
2691 2691 above is useful for understanding why that merge tool was chosen.
2692 2692 """
2693 2693 opts = pycompat.byteskwargs(opts)
2694 2694 overrides = {}
2695 2695 if opts[b'tool']:
2696 2696 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2697 2697 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2698 2698
2699 2699 with ui.configoverride(overrides, b'debugmergepatterns'):
2700 2700 hgmerge = encoding.environ.get(b"HGMERGE")
2701 2701 if hgmerge is not None:
2702 2702 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2703 2703 uimerge = ui.config(b"ui", b"merge")
2704 2704 if uimerge:
2705 2705 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2706 2706
2707 2707 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2708 2708 m = scmutil.match(ctx, pats, opts)
2709 2709 changedelete = opts[b'changedelete']
2710 2710 for path in ctx.walk(m):
2711 2711 fctx = ctx[path]
2712 2712 try:
2713 2713 if not ui.debugflag:
2714 2714 ui.pushbuffer(error=True)
2715 2715 tool, toolpath = filemerge._picktool(
2716 2716 repo,
2717 2717 ui,
2718 2718 path,
2719 2719 fctx.isbinary(),
2720 2720 b'l' in fctx.flags(),
2721 2721 changedelete,
2722 2722 )
2723 2723 finally:
2724 2724 if not ui.debugflag:
2725 2725 ui.popbuffer()
2726 2726 ui.write(b'%s = %s\n' % (path, tool))
2727 2727
2728 2728
2729 2729 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2730 2730 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2731 2731 """access the pushkey key/value protocol
2732 2732
2733 2733 With two args, list the keys in the given namespace.
2734 2734
2735 2735 With five args, set a key to new if it currently is set to old.
2736 2736 Reports success or failure.
2737 2737 """
2738 2738
2739 2739 target = hg.peer(ui, {}, repopath)
2740 2740 try:
2741 2741 if keyinfo:
2742 2742 key, old, new = keyinfo
2743 2743 with target.commandexecutor() as e:
2744 2744 r = e.callcommand(
2745 2745 b'pushkey',
2746 2746 {
2747 2747 b'namespace': namespace,
2748 2748 b'key': key,
2749 2749 b'old': old,
2750 2750 b'new': new,
2751 2751 },
2752 2752 ).result()
2753 2753
2754 2754 ui.status(pycompat.bytestr(r) + b'\n')
2755 2755 return not r
2756 2756 else:
2757 2757 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2758 2758 ui.write(
2759 2759 b"%s\t%s\n"
2760 2760 % (stringutil.escapestr(k), stringutil.escapestr(v))
2761 2761 )
2762 2762 finally:
2763 2763 target.close()
2764 2764
2765 2765
2766 2766 @command(b'debugpvec', [], _(b'A B'))
2767 2767 def debugpvec(ui, repo, a, b=None):
2768 2768 ca = scmutil.revsingle(repo, a)
2769 2769 cb = scmutil.revsingle(repo, b)
2770 2770 pa = pvec.ctxpvec(ca)
2771 2771 pb = pvec.ctxpvec(cb)
2772 2772 if pa == pb:
2773 2773 rel = b"="
2774 2774 elif pa > pb:
2775 2775 rel = b">"
2776 2776 elif pa < pb:
2777 2777 rel = b"<"
2778 2778 elif pa | pb:
2779 2779 rel = b"|"
2780 2780 ui.write(_(b"a: %s\n") % pa)
2781 2781 ui.write(_(b"b: %s\n") % pb)
2782 2782 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2783 2783 ui.write(
2784 2784 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2785 2785 % (
2786 2786 abs(pa._depth - pb._depth),
2787 2787 pvec._hamming(pa._vec, pb._vec),
2788 2788 pa.distance(pb),
2789 2789 rel,
2790 2790 )
2791 2791 )
2792 2792
2793 2793
2794 2794 @command(
2795 2795 b'debugrebuilddirstate|debugrebuildstate',
2796 2796 [
2797 2797 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2798 2798 (
2799 2799 b'',
2800 2800 b'minimal',
2801 2801 None,
2802 2802 _(
2803 2803 b'only rebuild files that are inconsistent with '
2804 2804 b'the working copy parent'
2805 2805 ),
2806 2806 ),
2807 2807 ],
2808 2808 _(b'[-r REV]'),
2809 2809 )
2810 2810 def debugrebuilddirstate(ui, repo, rev, **opts):
2811 2811 """rebuild the dirstate as it would look like for the given revision
2812 2812
2813 2813 If no revision is specified, the first parent of the working directory will be used.
2814 2814
2815 2815 The dirstate will be set to the files of the given revision.
2816 2816 The actual working directory content or existing dirstate
2817 2817 information such as adds or removes is not considered.
2818 2818
2819 2819 ``minimal`` will only rebuild the dirstate status for files that claim to be
2820 2820 tracked but are not in the parent manifest, or that exist in the parent
2821 2821 manifest but are not in the dirstate. It will not change adds, removes, or
2822 2822 modified files that are in the working copy parent.
2823 2823
2824 2824 One use of this command is to make the next :hg:`status` invocation
2825 2825 check the actual file content.
2826 2826 """
2827 2827 ctx = scmutil.revsingle(repo, rev)
2828 2828 with repo.wlock():
2829 2829 dirstate = repo.dirstate
2830 2830 changedfiles = None
2831 2831 # See command doc for what minimal does.
2832 2832 if opts.get('minimal'):
2833 2833 manifestfiles = set(ctx.manifest().keys())
2834 2834 dirstatefiles = set(dirstate)
2835 2835 manifestonly = manifestfiles - dirstatefiles
2836 2836 dsonly = dirstatefiles - manifestfiles
2837 2837 dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
2838 2838 changedfiles = manifestonly | dsnotadded
2839 2839
2840 2840 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2841 2841
2842 2842
2843 2843 @command(b'debugrebuildfncache', [], b'')
2844 2844 def debugrebuildfncache(ui, repo):
2845 2845 """rebuild the fncache file"""
2846 2846 repair.rebuildfncache(ui, repo)
2847 2847
2848 2848
2849 2849 @command(
2850 2850 b'debugrename',
2851 2851 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2852 2852 _(b'[-r REV] [FILE]...'),
2853 2853 )
2854 2854 def debugrename(ui, repo, *pats, **opts):
2855 2855 """dump rename information"""
2856 2856
2857 2857 opts = pycompat.byteskwargs(opts)
2858 2858 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2859 2859 m = scmutil.match(ctx, pats, opts)
2860 2860 for abs in ctx.walk(m):
2861 2861 fctx = ctx[abs]
2862 2862 o = fctx.filelog().renamed(fctx.filenode())
2863 2863 rel = repo.pathto(abs)
2864 2864 if o:
2865 2865 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2866 2866 else:
2867 2867 ui.write(_(b"%s not renamed\n") % rel)
2868 2868
2869 2869
2870 2870 @command(b'debugrequires|debugrequirements', [], b'')
2871 2871 def debugrequirements(ui, repo):
2872 2872 """ print the current repo requirements """
2873 2873 for r in sorted(repo.requirements):
2874 2874 ui.write(b"%s\n" % r)
2875 2875
2876 2876
2877 2877 @command(
2878 2878 b'debugrevlog',
2879 2879 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2880 2880 _(b'-c|-m|FILE'),
2881 2881 optionalrepo=True,
2882 2882 )
2883 2883 def debugrevlog(ui, repo, file_=None, **opts):
2884 2884 """show data and statistics about a revlog"""
2885 2885 opts = pycompat.byteskwargs(opts)
2886 2886 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2887 2887
2888 2888 if opts.get(b"dump"):
2889 2889 numrevs = len(r)
2890 2890 ui.write(
2891 2891 (
2892 2892 b"# rev p1rev p2rev start end deltastart base p1 p2"
2893 2893 b" rawsize totalsize compression heads chainlen\n"
2894 2894 )
2895 2895 )
2896 2896 ts = 0
2897 2897 heads = set()
2898 2898
2899 2899 for rev in pycompat.xrange(numrevs):
2900 2900 dbase = r.deltaparent(rev)
2901 2901 if dbase == -1:
2902 2902 dbase = rev
2903 2903 cbase = r.chainbase(rev)
2904 2904 clen = r.chainlen(rev)
2905 2905 p1, p2 = r.parentrevs(rev)
2906 2906 rs = r.rawsize(rev)
2907 2907 ts = ts + rs
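# maintain the set of DAG heads incrementally: this rev replaces its parents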
2908 2908 heads -= set(r.parentrevs(rev))
2909 2909 heads.add(rev)
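# cumulative compression ratio: raw bytes seen so far vs. compressed bytes stored up to this rev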
2910 2910 try:
2911 2911 compression = ts / r.end(rev)
2912 2912 except ZeroDivisionError:
2913 2913 compression = 0
2914 2914 ui.write(
2915 2915 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2916 2916 b"%11d %5d %8d\n"
2917 2917 % (
2918 2918 rev,
2919 2919 p1,
2920 2920 p2,
2921 2921 r.start(rev),
2922 2922 r.end(rev),
2923 2923 r.start(dbase),
2924 2924 r.start(cbase),
2925 2925 r.start(p1),
2926 2926 r.start(p2),
2927 2927 rs,
2928 2928 ts,
2929 2929 compression,
2930 2930 len(heads),
2931 2931 clen,
2932 2932 )
2933 2933 )
2934 2934 return 0
2935 2935
2936 2936 v = r.version
2937 2937 format = v & 0xFFFF
2938 2938 flags = []
2939 2939 gdelta = False
2940 2940 if v & revlog.FLAG_INLINE_DATA:
2941 2941 flags.append(b'inline')
2942 2942 if v & revlog.FLAG_GENERALDELTA:
2943 2943 gdelta = True
2944 2944 flags.append(b'generaldelta')
2945 2945 if not flags:
2946 2946 flags = [b'(none)']
2947 2947
2948 2948 ### tracks merge vs single parent
2949 2949 nummerges = 0
2950 2950
2951 2951 ### tracks the ways the "delta" is built
2952 2952 # nodelta
2953 2953 numempty = 0
2954 2954 numemptytext = 0
2955 2955 numemptydelta = 0
2956 2956 # full file content
2957 2957 numfull = 0
2958 2958 # intermediate snapshot against a prior snapshot
2959 2959 numsemi = 0
2960 2960 # snapshot count per depth
2961 2961 numsnapdepth = collections.defaultdict(lambda: 0)
2962 2962 # delta against previous revision
2963 2963 numprev = 0
2964 2964 # delta against first or second parent (not prev)
2965 2965 nump1 = 0
2966 2966 nump2 = 0
2967 2967 # delta against neither prev nor parents
2968 2968 numother = 0
2969 2969 # delta against prev that are also first or second parent
2970 2970 # (details of `numprev`)
2971 2971 nump1prev = 0
2972 2972 nump2prev = 0
2973 2973
2974 2974 # data about delta chain of each revs
2975 2975 chainlengths = []
2976 2976 chainbases = []
2977 2977 chainspans = []
2978 2978
2979 2979 # data about each revision
2980 2980 datasize = [None, 0, 0]
2981 2981 fullsize = [None, 0, 0]
2982 2982 semisize = [None, 0, 0]
2983 2983 # snapshot count per depth
2984 2984 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2985 2985 deltasize = [None, 0, 0]
2986 2986 chunktypecounts = {}
2987 2987 chunktypesizes = {}
2988 2988
2989 2989 def addsize(size, l):
2990 2990 if l[0] is None or size < l[0]:
2991 2991 l[0] = size
2992 2992 if size > l[1]:
2993 2993 l[1] = size
2994 2994 l[2] += size
2995 2995
2996 2996 numrevs = len(r)
2997 2997 for rev in pycompat.xrange(numrevs):
2998 2998 p1, p2 = r.parentrevs(rev)
2999 2999 delta = r.deltaparent(rev)
3000 3000 if format > 0:
3001 3001 addsize(r.rawsize(rev), datasize)
3002 3002 if p2 != nullrev:
3003 3003 nummerges += 1
3004 3004 size = r.length(rev)
3005 3005 if delta == nullrev:
3006 3006 chainlengths.append(0)
3007 3007 chainbases.append(r.start(rev))
3008 3008 chainspans.append(size)
3009 3009 if size == 0:
3010 3010 numempty += 1
3011 3011 numemptytext += 1
3012 3012 else:
3013 3013 numfull += 1
3014 3014 numsnapdepth[0] += 1
3015 3015 addsize(size, fullsize)
3016 3016 addsize(size, snapsizedepth[0])
3017 3017 else:
3018 3018 chainlengths.append(chainlengths[delta] + 1)
3019 3019 baseaddr = chainbases[delta]
3020 3020 revaddr = r.start(rev)
3021 3021 chainbases.append(baseaddr)
3022 3022 chainspans.append((revaddr - baseaddr) + size)
3023 3023 if size == 0:
3024 3024 numempty += 1
3025 3025 numemptydelta += 1
3026 3026 elif r.issnapshot(rev):
3027 3027 addsize(size, semisize)
3028 3028 numsemi += 1
3029 3029 depth = r.snapshotdepth(rev)
3030 3030 numsnapdepth[depth] += 1
3031 3031 addsize(size, snapsizedepth[depth])
3032 3032 else:
3033 3033 addsize(size, deltasize)
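# classify the delta base: previous revision, p1, p2, or some other revision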
3034 3034 if delta == rev - 1:
3035 3035 numprev += 1
3036 3036 if delta == p1:
3037 3037 nump1prev += 1
3038 3038 elif delta == p2:
3039 3039 nump2prev += 1
3040 3040 elif delta == p1:
3041 3041 nump1 += 1
3042 3042 elif delta == p2:
3043 3043 nump2 += 1
3044 3044 elif delta != nullrev:
3045 3045 numother += 1
3046 3046
3047 3047 # Obtain data on the raw chunks in the revlog.
3048 3048 if util.safehasattr(r, b'_getsegmentforrevs'):
3049 3049 segment = r._getsegmentforrevs(rev, rev)[1]
3050 3050 else:
3051 3051 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
3052 3052 if segment:
3053 3053 chunktype = bytes(segment[0:1])
3054 3054 else:
3055 3055 chunktype = b'empty'
3056 3056
3057 3057 if chunktype not in chunktypecounts:
3058 3058 chunktypecounts[chunktype] = 0
3059 3059 chunktypesizes[chunktype] = 0
3060 3060
3061 3061 chunktypecounts[chunktype] += 1
3062 3062 chunktypesizes[chunktype] += size
3063 3063
3064 3064 # Adjust size min value for empty cases
3065 3065 for size in (datasize, fullsize, semisize, deltasize):
3066 3066 if size[0] is None:
3067 3067 size[0] = 0
3068 3068
3069 3069 numdeltas = numrevs - numfull - numempty - numsemi
3070 3070 numoprev = numprev - nump1prev - nump2prev
3071 3071 totalrawsize = datasize[2]
3072 3072 datasize[2] /= numrevs
3073 3073 fulltotal = fullsize[2]
3074 3074 if numfull == 0:
3075 3075 fullsize[2] = 0
3076 3076 else:
3077 3077 fullsize[2] /= numfull
3078 3078 semitotal = semisize[2]
3079 3079 snaptotal = {}
3080 3080 if numsemi > 0:
3081 3081 semisize[2] /= numsemi
3082 3082 for depth in snapsizedepth:
3083 3083 snaptotal[depth] = snapsizedepth[depth][2]
3084 3084 snapsizedepth[depth][2] /= numsnapdepth[depth]
3085 3085
3086 3086 deltatotal = deltasize[2]
3087 3087 if numdeltas > 0:
3088 3088 deltasize[2] /= numdeltas
3089 3089 totalsize = fulltotal + semitotal + deltatotal
3090 3090 avgchainlen = sum(chainlengths) / numrevs
3091 3091 maxchainlen = max(chainlengths)
3092 3092 maxchainspan = max(chainspans)
3093 3093 compratio = 1
3094 3094 if totalsize:
3095 3095 compratio = totalrawsize / totalsize
3096 3096
3097 3097 basedfmtstr = b'%%%dd\n'
3098 3098 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
3099 3099
3100 3100 def dfmtstr(max):
3101 3101 return basedfmtstr % len(str(max))
3102 3102
3103 3103 def pcfmtstr(max, padding=0):
3104 3104 return basepcfmtstr % (len(str(max)), b' ' * padding)
3105 3105
3106 3106 def pcfmt(value, total):
3107 3107 if total:
3108 3108 return (value, 100 * float(value) / total)
3109 3109 else:
3110 3110 return value, 100.0
3111 3111
3112 3112 ui.writenoi18n(b'format : %d\n' % format)
3113 3113 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
3114 3114
3115 3115 ui.write(b'\n')
3116 3116 fmt = pcfmtstr(totalsize)
3117 3117 fmt2 = dfmtstr(totalsize)
3118 3118 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3119 3119 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
3120 3120 ui.writenoi18n(
3121 3121 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
3122 3122 )
3123 3123 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
3124 3124 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
3125 3125 ui.writenoi18n(
3126 3126 b' text : '
3127 3127 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
3128 3128 )
3129 3129 ui.writenoi18n(
3130 3130 b' delta : '
3131 3131 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
3132 3132 )
3133 3133 ui.writenoi18n(
3134 3134 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
3135 3135 )
3136 3136 for depth in sorted(numsnapdepth):
3137 3137 ui.write(
3138 3138 (b' lvl-%-3d : ' % depth)
3139 3139 + fmt % pcfmt(numsnapdepth[depth], numrevs)
3140 3140 )
3141 3141 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
3142 3142 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
3143 3143 ui.writenoi18n(
3144 3144 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
3145 3145 )
3146 3146 for depth in sorted(numsnapdepth):
3147 3147 ui.write(
3148 3148 (b' lvl-%-3d : ' % depth)
3149 3149 + fmt % pcfmt(snaptotal[depth], totalsize)
3150 3150 )
3151 3151 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
3152 3152
3153 3153 def fmtchunktype(chunktype):
3154 3154 if chunktype == b'empty':
3155 3155 return b' %s : ' % chunktype
3156 3156 elif chunktype in pycompat.bytestr(string.ascii_letters):
3157 3157 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
3158 3158 else:
3159 3159 return b' 0x%s : ' % hex(chunktype)
3160 3160
3161 3161 ui.write(b'\n')
3162 3162 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
3163 3163 for chunktype in sorted(chunktypecounts):
3164 3164 ui.write(fmtchunktype(chunktype))
3165 3165 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
3166 3166 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
3167 3167 for chunktype in sorted(chunktypecounts):
3168 3168 ui.write(fmtchunktype(chunktype))
3169 3169 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
3170 3170
3171 3171 ui.write(b'\n')
3172 3172 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
3173 3173 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
3174 3174 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
3175 3175 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
3176 3176 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
3177 3177
3178 3178 if format > 0:
3179 3179 ui.write(b'\n')
3180 3180 ui.writenoi18n(
3181 3181 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
3182 3182 % tuple(datasize)
3183 3183 )
3184 3184 ui.writenoi18n(
3185 3185 b'full revision size (min/max/avg) : %d / %d / %d\n'
3186 3186 % tuple(fullsize)
3187 3187 )
3188 3188 ui.writenoi18n(
3189 3189 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
3190 3190 % tuple(semisize)
3191 3191 )
3192 3192 for depth in sorted(snapsizedepth):
3193 3193 if depth == 0:
3194 3194 continue
3195 3195 ui.writenoi18n(
3196 3196 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
3197 3197 % ((depth,) + tuple(snapsizedepth[depth]))
3198 3198 )
3199 3199 ui.writenoi18n(
3200 3200 b'delta size (min/max/avg) : %d / %d / %d\n'
3201 3201 % tuple(deltasize)
3202 3202 )
3203 3203
3204 3204 if numdeltas > 0:
3205 3205 ui.write(b'\n')
3206 3206 fmt = pcfmtstr(numdeltas)
3207 3207 fmt2 = pcfmtstr(numdeltas, 4)
3208 3208 ui.writenoi18n(
3209 3209 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
3210 3210 )
3211 3211 if numprev > 0:
3212 3212 ui.writenoi18n(
3213 3213 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
3214 3214 )
3215 3215 ui.writenoi18n(
3216 3216 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
3217 3217 )
3218 3218 ui.writenoi18n(
3219 3219 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
3220 3220 )
3221 3221 if gdelta:
3222 3222 ui.writenoi18n(
3223 3223 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
3224 3224 )
3225 3225 ui.writenoi18n(
3226 3226 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
3227 3227 )
3228 3228 ui.writenoi18n(
3229 3229 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
3230 3230 )
3231 3231
3232 3232
3233 3233 @command(
3234 3234 b'debugrevlogindex',
3235 3235 cmdutil.debugrevlogopts
3236 3236 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
3237 3237 _(b'[-f FORMAT] -c|-m|FILE'),
3238 3238 optionalrepo=True,
3239 3239 )
3240 3240 def debugrevlogindex(ui, repo, file_=None, **opts):
3241 3241 """dump the contents of a revlog index"""
3242 3242 opts = pycompat.byteskwargs(opts)
3243 3243 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
3244 3244 format = opts.get(b'format', 0)
3245 3245 if format not in (0, 1):
3246 3246 raise error.Abort(_(b"unknown format %d") % format)
3247 3247
3248 3248 if ui.debugflag:
3249 3249 shortfn = hex
3250 3250 else:
3251 3251 shortfn = short
3252 3252
3253 3253 # There might not be anything in r, so have a sane default
3254 3254 idlen = 12
3255 3255 for i in r:
3256 3256 idlen = len(shortfn(r.node(i)))
3257 3257 break
3258 3258
3259 3259 if format == 0:
3260 3260 if ui.verbose:
3261 3261 ui.writenoi18n(
3262 3262 b" rev offset length linkrev %s %s p2\n"
3263 3263 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3264 3264 )
3265 3265 else:
3266 3266 ui.writenoi18n(
3267 3267 b" rev linkrev %s %s p2\n"
3268 3268 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
3269 3269 )
3270 3270 elif format == 1:
3271 3271 if ui.verbose:
3272 3272 ui.writenoi18n(
3273 3273 (
3274 3274 b" rev flag offset length size link p1"
3275 3275 b" p2 %s\n"
3276 3276 )
3277 3277 % b"nodeid".rjust(idlen)
3278 3278 )
3279 3279 else:
3280 3280 ui.writenoi18n(
3281 3281 b" rev flag size link p1 p2 %s\n"
3282 3282 % b"nodeid".rjust(idlen)
3283 3283 )
3284 3284
3285 3285 for i in r:
3286 3286 node = r.node(i)
3287 3287 if format == 0:
3288 3288 try:
3289 3289 pp = r.parents(node)
3290 3290 except Exception:
3291 3291 pp = [nullid, nullid]
3292 3292 if ui.verbose:
3293 3293 ui.write(
3294 3294 b"% 6d % 9d % 7d % 7d %s %s %s\n"
3295 3295 % (
3296 3296 i,
3297 3297 r.start(i),
3298 3298 r.length(i),
3299 3299 r.linkrev(i),
3300 3300 shortfn(node),
3301 3301 shortfn(pp[0]),
3302 3302 shortfn(pp[1]),
3303 3303 )
3304 3304 )
3305 3305 else:
3306 3306 ui.write(
3307 3307 b"% 6d % 7d %s %s %s\n"
3308 3308 % (
3309 3309 i,
3310 3310 r.linkrev(i),
3311 3311 shortfn(node),
3312 3312 shortfn(pp[0]),
3313 3313 shortfn(pp[1]),
3314 3314 )
3315 3315 )
3316 3316 elif format == 1:
3317 3317 pr = r.parentrevs(i)
3318 3318 if ui.verbose:
3319 3319 ui.write(
3320 3320 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3321 3321 % (
3322 3322 i,
3323 3323 r.flags(i),
3324 3324 r.start(i),
3325 3325 r.length(i),
3326 3326 r.rawsize(i),
3327 3327 r.linkrev(i),
3328 3328 pr[0],
3329 3329 pr[1],
3330 3330 shortfn(node),
3331 3331 )
3332 3332 )
3333 3333 else:
3334 3334 ui.write(
3335 3335 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3336 3336 % (
3337 3337 i,
3338 3338 r.flags(i),
3339 3339 r.rawsize(i),
3340 3340 r.linkrev(i),
3341 3341 pr[0],
3342 3342 pr[1],
3343 3343 shortfn(node),
3344 3344 )
3345 3345 )
3346 3346
3347 3347
3348 3348 @command(
3349 3349 b'debugrevspec',
3350 3350 [
3351 3351 (
3352 3352 b'',
3353 3353 b'optimize',
3354 3354 None,
3355 3355 _(b'print parsed tree after optimizing (DEPRECATED)'),
3356 3356 ),
3357 3357 (
3358 3358 b'',
3359 3359 b'show-revs',
3360 3360 True,
3361 3361 _(b'print list of result revisions (default)'),
3362 3362 ),
3363 3363 (
3364 3364 b's',
3365 3365 b'show-set',
3366 3366 None,
3367 3367 _(b'print internal representation of result set'),
3368 3368 ),
3369 3369 (
3370 3370 b'p',
3371 3371 b'show-stage',
3372 3372 [],
3373 3373 _(b'print parsed tree at the given stage'),
3374 3374 _(b'NAME'),
3375 3375 ),
3376 3376 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3377 3377 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3378 3378 ],
3379 3379 b'REVSPEC',
3380 3380 )
3381 3381 def debugrevspec(ui, repo, expr, **opts):
3382 3382 """parse and apply a revision specification
3383 3383
3384 3384 Use the -p/--show-stage option to print the parsed tree at the given stages.
3385 3385 Use -p all to print the tree at every stage.
3386 3386
3387 3387 Use the --no-show-revs option with -s or -p to print only the set
3388 3388 representation or the parsed tree respectively.
3389 3389
3390 3390 Use --verify-optimized to compare the optimized result with the unoptimized
3391 3391 one. Returns 1 if the optimized result differs.
3392 3392 """
3393 3393 opts = pycompat.byteskwargs(opts)
3394 3394 aliases = ui.configitems(b'revsetalias')
3395 3395 stages = [
3396 3396 (b'parsed', lambda tree: tree),
3397 3397 (
3398 3398 b'expanded',
3399 3399 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3400 3400 ),
3401 3401 (b'concatenated', revsetlang.foldconcat),
3402 3402 (b'analyzed', revsetlang.analyze),
3403 3403 (b'optimized', revsetlang.optimize),
3404 3404 ]
3405 3405 if opts[b'no_optimized']:
3406 3406 stages = stages[:-1]
3407 3407 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3408 3408 raise error.Abort(
3409 3409 _(b'cannot use --verify-optimized with --no-optimized')
3410 3410 )
3411 3411 stagenames = {n for n, f in stages}
3412 3412
3413 3413 showalways = set()
3414 3414 showchanged = set()
3415 3415 if ui.verbose and not opts[b'show_stage']:
3416 3416 # show parsed tree by --verbose (deprecated)
3417 3417 showalways.add(b'parsed')
3418 3418 showchanged.update([b'expanded', b'concatenated'])
3419 3419 if opts[b'optimize']:
3420 3420 showalways.add(b'optimized')
3421 3421 if opts[b'show_stage'] and opts[b'optimize']:
3422 3422 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3423 3423 if opts[b'show_stage'] == [b'all']:
3424 3424 showalways.update(stagenames)
3425 3425 else:
3426 3426 for n in opts[b'show_stage']:
3427 3427 if n not in stagenames:
3428 3428 raise error.Abort(_(b'invalid stage name: %s') % n)
3429 3429 showalways.update(opts[b'show_stage'])
3430 3430
3431 3431 treebystage = {}
3432 3432 printedtree = None
3433 3433 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3434 3434 for n, f in stages:
3435 3435 treebystage[n] = tree = f(tree)
3436 3436 if n in showalways or (n in showchanged and tree != printedtree):
3437 3437 if opts[b'show_stage'] or n != b'parsed':
3438 3438 ui.write(b"* %s:\n" % n)
3439 3439 ui.write(revsetlang.prettyformat(tree), b"\n")
3440 3440 printedtree = tree
3441 3441
3442 3442 if opts[b'verify_optimized']:
3443 3443 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3444 3444 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3445 3445 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3446 3446 ui.writenoi18n(
3447 3447 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3448 3448 )
3449 3449 ui.writenoi18n(
3450 3450 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3451 3451 )
3452 3452 arevs = list(arevs)
3453 3453 brevs = list(brevs)
3454 3454 if arevs == brevs:
3455 3455 return 0
3456 3456 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3457 3457 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3458 3458 sm = difflib.SequenceMatcher(None, arevs, brevs)
3459 3459 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3460 3460 if tag in ('delete', 'replace'):
3461 3461 for c in arevs[alo:ahi]:
3462 3462 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3463 3463 if tag in ('insert', 'replace'):
3464 3464 for c in brevs[blo:bhi]:
3465 3465 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3466 3466 if tag == 'equal':
3467 3467 for c in arevs[alo:ahi]:
3468 3468 ui.write(b' %d\n' % c)
3469 3469 return 1
3470 3470
3471 3471 func = revset.makematcher(tree)
3472 3472 revs = func(repo)
3473 3473 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3474 3474 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3475 3475 if not opts[b'show_revs']:
3476 3476 return
3477 3477 for c in revs:
3478 3478 ui.write(b"%d\n" % c)
3479 3479
3480 3480
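# Illustrative sketch, not part of the original module: the staged pipeline
# that debugrevspec walks above can be driven directly with the revsetlang
# and revset APIs already imported here. The helper name _demorevspecstages
# is hypothetical and exists only for documentation purposes.
def _demorevspecstages(ui, repo, expr=b'all()'):
    """run EXPR through the same stages as debugrevspec and return the revs"""
    aliases = ui.configitems(b'revsetalias')
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    tree = revsetlang.expandaliases(tree, aliases, ui.warn)  # 'expanded'
    tree = revsetlang.foldconcat(tree)  # 'concatenated'
    tree = revsetlang.analyze(tree)  # 'analyzed'
    tree = revsetlang.optimize(tree)  # 'optimized'
    return list(revset.makematcher(tree)(repo))

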
3481 3481 @command(
3482 3482 b'debugserve',
3483 3483 [
3484 3484 (
3485 3485 b'',
3486 3486 b'sshstdio',
3487 3487 False,
3488 3488 _(b'run an SSH server bound to process handles'),
3489 3489 ),
3490 3490 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3491 3491 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3492 3492 ],
3493 3493 b'',
3494 3494 )
3495 3495 def debugserve(ui, repo, **opts):
3496 3496 """run a server with advanced settings
3497 3497
3498 3498 This command is similar to :hg:`serve`. It exists partially as a
3499 3499 workaround for the fact that ``hg serve --stdio`` must be invoked with
3500 3500 specific arguments for security reasons.
3501 3501 """
3502 3502 opts = pycompat.byteskwargs(opts)
3503 3503
3504 3504 if not opts[b'sshstdio']:
3505 3505 raise error.Abort(_(b'only --sshstdio is currently supported'))
3506 3506
3507 3507 logfh = None
3508 3508
3509 3509 if opts[b'logiofd'] and opts[b'logiofile']:
3510 3510 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3511 3511
3512 3512 if opts[b'logiofd']:
3513 3513 # Ideally we would be line buffered. But line buffering in binary
3514 3514 # mode isn't supported and emits a warning in Python 3.8+. Disabling
3515 3515 # buffering could have performance impacts. But since this isn't
3516 3516 # performance critical code, it should be fine.
3517 3517 try:
3518 3518 logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
3519 3519 except OSError as e:
3520 3520 if e.errno != errno.ESPIPE:
3521 3521 raise
3522 3522 # can't seek a pipe, so `ab` mode fails on py3
3523 3523 logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
3524 3524 elif opts[b'logiofile']:
3525 3525 logfh = open(opts[b'logiofile'], b'ab', 0)
3526 3526
3527 3527 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3528 3528 s.serve_forever()
3529 3529
3530 3530
3531 3531 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3532 3532 def debugsetparents(ui, repo, rev1, rev2=None):
3533 3533 """manually set the parents of the current working directory (DANGEROUS)
3534 3534
3535 3535 This command is not what you are looking for and should not be used. Using
3536 3536 this command will most certainly result in slight corruption of the file-level
3537 3537 histories within your repository. DO NOT USE THIS COMMAND.
3538 3538
3539 3539 The command updates the p1 and p2 fields in the dirstate and touches nothing
3540 3540 else. This is useful for writing repository conversion tools, but it should be
3541 3541 used with extreme care. For example, neither the working directory nor the
3542 3542 dirstate is updated, so file status may be incorrect after running this
3543 3543 command. Only use it if you are one of the few people who deeply understand
3544 3544 both conversion tools and file-level histories. If you are reading this help,
3545 3545 you are not one of those people (most of them sailed west from Mithlond
3546 3546 anyway).
3547 3547
3548 3548 So one last time DO NOT USE THIS COMMAND.
3549 3549
3550 3550 Returns 0 on success.
3551 3551 """
3552 3552
3553 3553 node1 = scmutil.revsingle(repo, rev1).node()
3554 3554 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3555 3555
3556 3556 with repo.wlock():
3557 3557 repo.setparents(node1, node2)
3558 3558
3559 3559
3560 3560 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3561 3561 def debugsidedata(ui, repo, file_, rev=None, **opts):
3562 3562 """dump the side data for a cl/manifest/file revision
3563 3563
3564 3564 Use --verbose to dump the sidedata content."""
3565 3565 opts = pycompat.byteskwargs(opts)
3566 3566 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3567 3567 if rev is not None:
3568 3568 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3569 3569 file_, rev = None, file_
3570 3570 elif rev is None:
3571 3571 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3572 3572 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3573 3573 r = getattr(r, '_revlog', r)
3574 3574 try:
3575 3575 sidedata = r.sidedata(r.lookup(rev))
3576 3576 except KeyError:
3577 3577 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3578 3578 if sidedata:
3579 3579 sidedata = list(sidedata.items())
3580 3580 sidedata.sort()
3581 3581 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3582 3582 for key, value in sidedata:
3583 3583 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3584 3584 if ui.verbose:
3585 3585 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3586 3586
3587 3587
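# Illustrative sketch, not part of the original module: fetching the raw
# sidedata mapping for a single revision, as debugsidedata does above. The
# helper name _sidedatafor is hypothetical.
def _sidedatafor(rl, rev):
    """return the sidedata dict ({key: bytes}) stored for REV in revlog RL"""
    return rl.sidedata(rl.lookup(rev))

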
3588 3588 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3589 3589 def debugssl(ui, repo, source=None, **opts):
3590 3590 """test a secure connection to a server
3591 3591
3592 3592 This builds the certificate chain for the server on Windows, installing the
3593 3593 missing intermediates and trusted root via Windows Update if necessary. It
3594 3594 does nothing on other platforms.
3595 3595
3596 3596 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3597 3597 that server is used. See :hg:`help urls` for more information.
3598 3598
3599 3599 If the update succeeds, retry the original operation. Otherwise, the cause
3600 3600 of the SSL error is likely another issue.
3601 3601 """
3602 3602 if not pycompat.iswindows:
3603 3603 raise error.Abort(
3604 3604 _(b'certificate chain building is only possible on Windows')
3605 3605 )
3606 3606
3607 3607 if not source:
3608 3608 if not repo:
3609 3609 raise error.Abort(
3610 3610 _(
3611 3611 b"there is no Mercurial repository here, and no "
3612 3612 b"server specified"
3613 3613 )
3614 3614 )
3615 3615 source = b"default"
3616 3616
3617 3617 source, branches = hg.parseurl(ui.expandpath(source))
3618 3618 url = util.url(source)
3619 3619
3620 3620 defaultport = {b'https': 443, b'ssh': 22}
3621 3621 if url.scheme in defaultport:
3622 3622 try:
3623 3623 addr = (url.host, int(url.port or defaultport[url.scheme]))
3624 3624 except ValueError:
3625 3625 raise error.Abort(_(b"malformed port number in URL"))
3626 3626 else:
3627 3627 raise error.Abort(_(b"only https and ssh connections are supported"))
3628 3628
3629 3629 from . import win32
3630 3630
3631 3631 s = ssl.wrap_socket(
3632 3632 socket.socket(),
3633 3633 ssl_version=ssl.PROTOCOL_TLS,
3634 3634 cert_reqs=ssl.CERT_NONE,
3635 3635 ca_certs=None,
3636 3636 )
3637 3637
3638 3638 try:
3639 3639 s.connect(addr)
3640 3640 cert = s.getpeercert(True)
3641 3641
3642 3642 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3643 3643
3644 3644 complete = win32.checkcertificatechain(cert, build=False)
3645 3645
3646 3646 if not complete:
3647 3647 ui.status(_(b'certificate chain is incomplete, updating... '))
3648 3648
3649 3649 if not win32.checkcertificatechain(cert):
3650 3650 ui.status(_(b'failed.\n'))
3651 3651 else:
3652 3652 ui.status(_(b'done.\n'))
3653 3653 else:
3654 3654 ui.status(_(b'full certificate chain is available\n'))
3655 3655 finally:
3656 3656 s.close()
3657 3657
3658 3658
3659 3659 @command(
3660 3660 b"debugbackupbundle",
3661 3661 [
3662 3662 (
3663 3663 b"",
3664 3664 b"recover",
3665 3665 b"",
3666 3666 b"brings the specified changeset back into the repository",
3667 3667 )
3668 3668 ]
3669 3669 + cmdutil.logopts,
3670 3670 _(b"hg debugbackupbundle [--recover HASH]"),
3671 3671 )
3672 3672 def debugbackupbundle(ui, repo, *pats, **opts):
3673 3673 """lists the changesets available in backup bundles
3674 3674
3675 3675 Without any arguments, this command prints a list of the changesets in each
3676 3676 backup bundle.
3677 3677
3678 3678 --recover takes a changeset hash and unbundles the first bundle that
3679 3679 contains that hash, which puts that changeset back in your repository.
3680 3680
3681 3681 --verbose will print the entire commit message and the bundle path for that
3682 3682 backup.
3683 3683 """
3684 3684 backups = list(
3685 3685 filter(
3686 3686 os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
3687 3687 )
3688 3688 )
3689 3689 backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)
3690 3690
3691 3691 opts = pycompat.byteskwargs(opts)
3692 3692 opts[b"bundle"] = b""
3693 3693 opts[b"force"] = None
3694 3694 limit = logcmdutil.getlimit(opts)
3695 3695
3696 3696 def display(other, chlist, displayer):
3697 3697 if opts.get(b"newest_first"):
3698 3698 chlist.reverse()
3699 3699 count = 0
3700 3700 for n in chlist:
3701 3701 if limit is not None and count >= limit:
3702 3702 break
3703 3703 parents = [True for p in other.changelog.parents(n) if p != nullid]
3704 3704 if opts.get(b"no_merges") and len(parents) == 2:
3705 3705 continue
3706 3706 count += 1
3707 3707 displayer.show(other[n])
3708 3708
3709 3709 recovernode = opts.get(b"recover")
3710 3710 if recovernode:
3711 3711 if scmutil.isrevsymbol(repo, recovernode):
3712 3712 ui.warn(_(b"%s already exists in the repo\n") % recovernode)
3713 3713 return
3714 3714 elif backups:
3715 3715 msg = _(
3716 3716 b"Recover changesets using: hg debugbackupbundle --recover "
3717 3717 b"<changeset hash>\n\nAvailable backup changesets:"
3718 3718 )
3719 3719 ui.status(msg, label=b"status.removed")
3720 3720 else:
3721 3721 ui.status(_(b"no backup changesets found\n"))
3722 3722 return
3723 3723
3724 3724 for backup in backups:
3725 3725 # Much of this is copied from the hg incoming logic
3726 3726 source = ui.expandpath(os.path.relpath(backup, encoding.getcwd()))
3727 3727 source, branches = hg.parseurl(source, opts.get(b"branch"))
3728 3728 try:
3729 3729 other = hg.peer(repo, opts, source)
3730 3730 except error.LookupError as ex:
3731 3731 msg = _(b"\nwarning: unable to open bundle %s") % source
3732 3732 hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
3733 3733 ui.warn(msg, hint=hint)
3734 3734 continue
3735 3735 revs, checkout = hg.addbranchrevs(
3736 3736 repo, other, branches, opts.get(b"rev")
3737 3737 )
3738 3738
3739 3739 if revs:
3740 3740 revs = [other.lookup(rev) for rev in revs]
3741 3741
3742 3742 quiet = ui.quiet
3743 3743 try:
3744 3744 ui.quiet = True
3745 3745 other, chlist, cleanupfn = bundlerepo.getremotechanges(
3746 3746 ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
3747 3747 )
3748 3748 except error.LookupError:
3749 3749 continue
3750 3750 finally:
3751 3751 ui.quiet = quiet
3752 3752
3753 3753 try:
3754 3754 if not chlist:
3755 3755 continue
3756 3756 if recovernode:
3757 3757 with repo.lock(), repo.transaction(b"unbundle") as tr:
3758 3758 if scmutil.isrevsymbol(other, recovernode):
3759 3759 ui.status(_(b"Unbundling %s\n") % (recovernode))
3760 3760 f = hg.openpath(ui, source)
3761 3761 gen = exchange.readbundle(ui, f, source)
3762 3762 if isinstance(gen, bundle2.unbundle20):
3763 3763 bundle2.applybundle(
3764 3764 repo,
3765 3765 gen,
3766 3766 tr,
3767 3767 source=b"unbundle",
3768 3768 url=b"bundle:" + source,
3769 3769 )
3770 3770 else:
3771 3771 gen.apply(repo, b"unbundle", b"bundle:" + source)
3772 3772 break
3773 3773 else:
3774 3774 backupdate = encoding.strtolocal(
3775 3775 time.strftime(
3776 3776 "%a %H:%M, %Y-%m-%d",
3777 3777 time.localtime(os.path.getmtime(source)),
3778 3778 )
3779 3779 )
3780 3780 ui.status(b"\n%s\n" % (backupdate.ljust(50)))
3781 3781 if ui.verbose:
3782 3782 ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
3783 3783 else:
3784 3784 opts[
3785 3785 b"template"
3786 3786 ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
3787 3787 displayer = logcmdutil.changesetdisplayer(
3788 3788 ui, other, opts, False
3789 3789 )
3790 3790 display(other, chlist, displayer)
3791 3791 displayer.close()
3792 3792 finally:
3793 3793 cleanupfn()
3794 3794
3795 3795
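# Illustrative sketch, not part of the original module: how the backup
# bundles listed by debugbackupbundle are discovered on disk, mirroring the
# glob and mtime-sort logic above. The helper name _liststripbackups is
# hypothetical.
def _liststripbackups(repo):
    """return strip-backup bundle paths for REPO, newest first"""
    pattern = repo.vfs.join(b"strip-backup") + b"/*.hg"
    backups = [p for p in glob.glob(pattern) if os.path.isfile(p)]
    backups.sort(key=os.path.getmtime, reverse=True)
    return backups

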
3796 3796 @command(
3797 3797 b'debugsub',
3798 3798 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3799 3799 _(b'[-r REV] [REV]'),
3800 3800 )
3801 3801 def debugsub(ui, repo, rev=None):
3802 3802 ctx = scmutil.revsingle(repo, rev, None)
3803 3803 for k, v in sorted(ctx.substate.items()):
3804 3804 ui.writenoi18n(b'path %s\n' % k)
3805 3805 ui.writenoi18n(b' source %s\n' % v[0])
3806 3806 ui.writenoi18n(b' revision %s\n' % v[1])
3807 3807
3808 3808
3809 3809 @command(b'debugshell', optionalrepo=True)
3810 3810 def debugshell(ui, repo):
3811 3811 """run an interactive Python interpreter
3812 3812
3813 3813 The local namespace is provided with a reference to the ui and
3814 3814 the repo instance (if available).
3815 3815 """
3816 3816 import code
3817 3817
3818 3818 imported_objects = {
3819 3819 'ui': ui,
3820 3820 'repo': repo,
3821 3821 }
3822 3822
3823 3823 code.interact(local=imported_objects)
3824 3824
3825 3825
3826 3826 @command(
3827 3827 b'debugsuccessorssets',
3828 3828 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3829 3829 _(b'[REV]'),
3830 3830 )
3831 3831 def debugsuccessorssets(ui, repo, *revs, **opts):
3832 3832 """show set of successors for revision
3833 3833
3834 3834 A successors set of changeset A is a consistent group of revisions that
3835 3835 succeed A. It contains non-obsolete changesets only, unless the closest
3836 3836 successors sets are requested (see the --closest option).
3837 3837
3838 3838 In most cases a changeset A has a single successors set containing a single
3839 3839 successor (changeset A replaced by A').
3840 3840
3841 3841 A changeset that is made obsolete with no successors is called "pruned".
3842 3842 Such changesets have no successors sets at all.
3843 3843
3844 3844 A changeset that has been "split" will have a successors set containing
3845 3845 more than one successor.
3846 3846
3847 3847 A changeset that has been rewritten in multiple different ways is called
3848 3848 "divergent". Such changesets have multiple successor sets (each of which
3849 3849 may also be split, i.e. have multiple successors).
3850 3850
3851 3851 Results are displayed as follows::
3852 3852
3853 3853 <rev1>
3854 3854 <successors-1A>
3855 3855 <rev2>
3856 3856 <successors-2A>
3857 3857 <successors-2B1> <successors-2B2> <successors-2B3>
3858 3858
3859 3859 Here rev2 has two possible (i.e. divergent) successors sets. The first
3860 3860 holds one element, whereas the second holds three (i.e. the changeset has
3861 3861 been split).
3862 3862 """
3863 3863 # passed to successorssets caching computation from one call to another
3864 3864 cache = {}
3865 3865 ctx2str = bytes
3866 3866 node2str = short
3867 3867 for rev in scmutil.revrange(repo, revs):
3868 3868 ctx = repo[rev]
3869 3869 ui.write(b'%s\n' % ctx2str(ctx))
3870 3870 for succsset in obsutil.successorssets(
3871 3871 repo, ctx.node(), closest=opts['closest'], cache=cache
3872 3872 ):
3873 3873 if succsset:
3874 3874 ui.write(b' ')
3875 3875 ui.write(node2str(succsset[0]))
3876 3876 for node in succsset[1:]:
3877 3877 ui.write(b' ')
3878 3878 ui.write(node2str(node))
3879 3879 ui.write(b'\n')
3880 3880
3881 3881
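# Illustrative sketch, not part of the original module: computing the
# successors sets of one revision with the same obsutil API used above. The
# helper name _successorsfor is hypothetical.
def _successorsfor(repo, rev, closest=False):
    """return the successors sets of REV as lists of short node ids"""
    ctx = repo[rev]
    cache = {}  # debugsuccessorssets shares this across revisions
    return [
        [short(n) for n in succsset]
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=closest, cache=cache
        )
    ]

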
3882 3882 @command(b'debugtagscache', [])
3883 3883 def debugtagscache(ui, repo):
3884 3884 """display the contents of .hg/cache/hgtagsfnodes1"""
3885 3885 cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
3886 3886 flog = repo.file(b'.hgtags')
3887 3887 for r in repo:
3888 3888 node = repo[r].node()
3889 3889 tagsnode = cache.getfnode(node, computemissing=False)
3890 3890 if tagsnode:
3891 3891 tagsnodedisplay = hex(tagsnode)
3892 3892 if not flog.hasnode(tagsnode):
3893 3893 tagsnodedisplay += b' (unknown node)'
3894 3894 elif tagsnode is None:
3895 3895 tagsnodedisplay = b'missing'
3896 3896 else:
3897 3897 tagsnodedisplay = b'invalid'
3898 3898
3899 3899 ui.write(b'%d %s %s\n' % (r, hex(node), tagsnodedisplay))
3900 3900
3901 3901
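# Illustrative sketch, not part of the original module: looking up the cached
# .hgtags filenode for a single changeset, the per-row operation performed by
# debugtagscache above. The helper name _cachedtagsfnode is hypothetical.
def _cachedtagsfnode(repo, rev):
    """return the cached .hgtags filenode for REV, or None if not cached"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    return cache.getfnode(repo[rev].node(), computemissing=False)

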
3902 3902 @command(
3903 3903 b'debugtemplate',
3904 3904 [
3905 3905 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3906 3906 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3907 3907 ],
3908 3908 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3909 3909 optionalrepo=True,
3910 3910 )
3911 3911 def debugtemplate(ui, repo, tmpl, **opts):
3912 3912 """parse and apply a template
3913 3913
3914 3914 If -r/--rev is given, the template is processed as a log template and
3915 3915 applied to the given changesets. Otherwise, it is processed as a generic
3916 3916 template.
3917 3917
3918 3918 Use --verbose to print the parsed tree.
3919 3919 """
3920 3920 revs = None
3921 3921 if opts['rev']:
3922 3922 if repo is None:
3923 3923 raise error.RepoError(
3924 3924 _(b'there is no Mercurial repository here (.hg not found)')
3925 3925 )
3926 3926 revs = scmutil.revrange(repo, opts['rev'])
3927 3927
3928 3928 props = {}
3929 3929 for d in opts['define']:
3930 3930 try:
3931 3931 k, v = (e.strip() for e in d.split(b'=', 1))
3932 3932 if not k or k == b'ui':
3933 3933 raise ValueError
3934 3934 props[k] = v
3935 3935 except ValueError:
3936 3936 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3937 3937
3938 3938 if ui.verbose:
3939 3939 aliases = ui.configitems(b'templatealias')
3940 3940 tree = templater.parse(tmpl)
3941 3941 ui.note(templater.prettyformat(tree), b'\n')
3942 3942 newtree = templater.expandaliases(tree, aliases)
3943 3943 if newtree != tree:
3944 3944 ui.notenoi18n(
3945 3945 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3946 3946 )
3947 3947
3948 3948 if revs is None:
3949 3949 tres = formatter.templateresources(ui, repo)
3950 3950 t = formatter.maketemplater(ui, tmpl, resources=tres)
3951 3951 if ui.verbose:
3952 3952 kwds, funcs = t.symbolsuseddefault()
3953 3953 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3954 3954 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3955 3955 ui.write(t.renderdefault(props))
3956 3956 else:
3957 3957 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3958 3958 if ui.verbose:
3959 3959 kwds, funcs = displayer.t.symbolsuseddefault()
3960 3960 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3961 3961 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3962 3962 for r in revs:
3963 3963 displayer.show(repo[r], **pycompat.strkwargs(props))
3964 3964 displayer.close()
3965 3965
3966 3966
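# Illustrative sketch, not part of the original module: rendering a generic
# (non-log) template the way debugtemplate does when no revision is given.
# The helper name _rendergenerictemplate is hypothetical.
def _rendergenerictemplate(ui, repo, tmpl, props=None):
    """render TMPL with the optional PROPS mapping and return the bytes"""
    tres = formatter.templateresources(ui, repo)
    t = formatter.maketemplater(ui, tmpl, resources=tres)
    return t.renderdefault(props or {})

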
3967 3967 @command(
3968 3968 b'debuguigetpass',
3969 3969 [
3970 3970 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3971 3971 ],
3972 3972 _(b'[-p TEXT]'),
3973 3973 norepo=True,
3974 3974 )
3975 3975 def debuguigetpass(ui, prompt=b''):
3976 3976 """show prompt to type password"""
3977 3977 r = ui.getpass(prompt)
3978 3978 if r is None:
3979 3979 r = b"<default response>"
3980 3980 ui.writenoi18n(b'response: %s\n' % r)
3981 3981
3982 3982
3983 3983 @command(
3984 3984 b'debuguiprompt',
3985 3985 [
3986 3986 (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
3987 3987 ],
3988 3988 _(b'[-p TEXT]'),
3989 3989 norepo=True,
3990 3990 )
3991 3991 def debuguiprompt(ui, prompt=b''):
3992 3992 """show plain prompt"""
3993 3993 r = ui.prompt(prompt)
3994 3994 ui.writenoi18n(b'response: %s\n' % r)
3995 3995
3996 3996
3997 3997 @command(b'debugupdatecaches', [])
3998 3998 def debugupdatecaches(ui, repo, *pats, **opts):
3999 3999 """warm all known caches in the repository"""
4000 4000 with repo.wlock(), repo.lock():
4001 4001 repo.updatecaches(full=True)
4002 4002
4003 4003
4004 4004 @command(
4005 4005 b'debugupgraderepo',
4006 4006 [
4007 4007 (
4008 4008 b'o',
4009 4009 b'optimize',
4010 4010 [],
4011 4011 _(b'extra optimization to perform'),
4012 4012 _(b'NAME'),
4013 4013 ),
4014 4014 (b'', b'run', False, _(b'performs an upgrade')),
4015 4015 (b'', b'backup', True, _(b'keep the old repository content around')),
4016 4016 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
4017 4017 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
4018 4018 (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
4019 4019 ],
4020 4020 )
4021 4021 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
4022 4022 """upgrade a repository to use different features
4023 4023
4024 4024 If no arguments are specified, the repository is evaluated for upgrade
4025 4025 and a list of problems and potential optimizations is printed.
4026 4026
4027 4027 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
4028 4028 can be influenced via additional arguments. More details will be provided
4029 4029 by the command output when run without ``--run``.
4030 4030
4031 4031 During the upgrade, the repository will be locked and no writes will be
4032 4032 allowed.
4033 4033
4034 4034 At the end of the upgrade, the repository may not be readable while new
4035 4035 repository data is swapped in. This window will be as long as it takes to
4036 4036 rename some directories inside the ``.hg`` directory. On most machines, this
4037 4037 should complete almost instantaneously and the chances of a consumer being
4038 4038 unable to access the repository should be low.
4039 4039
4040 4040 By default, all revlogs will be upgraded. You can restrict this using flags
4041 4041 such as `--manifest`:
4042 4042
4043 4043 * `--manifest`: only optimize the manifest
4044 4044 * `--no-manifest`: optimize all revlogs but the manifest
4045 4045 * `--changelog`: optimize the changelog only
4046 4046 * `--no-changelog --no-manifest`: optimize filelogs only
4047 4047 * `--filelogs`: optimize the filelogs only
4048 4048 * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
4049 4049 """
4050 4050 return upgrade.upgraderepo(
4051 4051 ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
4052 4052 )
4053 4053
4054 4054
4055 4055 @command(
4056 4056 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
4057 4057 )
4058 4058 def debugwalk(ui, repo, *pats, **opts):
4059 4059 """show how files match on given patterns"""
4060 4060 opts = pycompat.byteskwargs(opts)
4061 4061 m = scmutil.match(repo[None], pats, opts)
4062 4062 if ui.verbose:
4063 4063 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
4064 4064 items = list(repo[None].walk(m))
4065 4065 if not items:
4066 4066 return
4067 4067 f = lambda fn: fn
4068 4068 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
4069 4069 f = lambda fn: util.normpath(fn)
4070 4070 fmt = b'f %%-%ds %%-%ds %%s' % (
4071 4071 max([len(abs) for abs in items]),
4072 4072 max([len(repo.pathto(abs)) for abs in items]),
4073 4073 )
4074 4074 for abs in items:
4075 4075 line = fmt % (
4076 4076 abs,
4077 4077 f(repo.pathto(abs)),
4078 4078 m.exact(abs) and b'exact' or b'',
4079 4079 )
4080 4080 ui.write(b"%s\n" % line.rstrip())
4081 4081
4082 4082
4083 4083 @command(b'debugwhyunstable', [], _(b'REV'))
4084 4084 def debugwhyunstable(ui, repo, rev):
4085 4085 """explain instabilities of a changeset"""
4086 4086 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
4087 4087 dnodes = b''
4088 4088 if entry.get(b'divergentnodes'):
4089 4089 dnodes = (
4090 4090 b' '.join(
4091 4091 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
4092 4092 for ctx in entry[b'divergentnodes']
4093 4093 )
4094 4094 + b' '
4095 4095 )
4096 4096 ui.write(
4097 4097 b'%s: %s%s %s\n'
4098 4098 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
4099 4099 )
4100 4100
4101 4101
4102 4102 @command(
4103 4103 b'debugwireargs',
4104 4104 [
4105 4105 (b'', b'three', b'', b'three'),
4106 4106 (b'', b'four', b'', b'four'),
4107 4107 (b'', b'five', b'', b'five'),
4108 4108 ]
4109 4109 + cmdutil.remoteopts,
4110 4110 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
4111 4111 norepo=True,
4112 4112 )
4113 4113 def debugwireargs(ui, repopath, *vals, **opts):
4114 4114 opts = pycompat.byteskwargs(opts)
4115 4115 repo = hg.peer(ui, opts, repopath)
4116 4116 try:
4117 4117 for opt in cmdutil.remoteopts:
4118 4118 del opts[opt[1]]
4119 4119 args = {}
4120 4120 for k, v in pycompat.iteritems(opts):
4121 4121 if v:
4122 4122 args[k] = v
4123 4123 args = pycompat.strkwargs(args)
4124 4124 # run twice to check that we don't mess up the stream for the next command
4125 4125 res1 = repo.debugwireargs(*vals, **args)
4126 4126 res2 = repo.debugwireargs(*vals, **args)
4127 4127 ui.write(b"%s\n" % res1)
4128 4128 if res1 != res2:
4129 4129 ui.warn(b"%s\n" % res2)
4130 4130 finally:
4131 4131 repo.close()
4132 4132
4133 4133
4134 4134 def _parsewirelangblocks(fh):
4135 4135 activeaction = None
4136 4136 blocklines = []
4137 4137 lastindent = 0
4138 4138
4139 4139 for line in fh:
4140 4140 line = line.rstrip()
4141 4141 if not line:
4142 4142 continue
4143 4143
4144 4144 if line.startswith(b'#'):
4145 4145 continue
4146 4146
4147 4147 if not line.startswith(b' '):
4148 4148 # New block. Flush previous one.
4149 4149 if activeaction:
4150 4150 yield activeaction, blocklines
4151 4151
4152 4152 activeaction = line
4153 4153 blocklines = []
4154 4154 lastindent = 0
4155 4155 continue
4156 4156
4157 4157 # Else we start with an indent.
4158 4158
4159 4159 if not activeaction:
4160 4160 raise error.Abort(_(b'indented line outside of block'))
4161 4161
4162 4162 indent = len(line) - len(line.lstrip())
4163 4163
4164 4164 # If this line is indented more than the last line, concatenate it.
4165 4165 if indent > lastindent and blocklines:
4166 4166 blocklines[-1] += line.lstrip()
4167 4167 else:
4168 4168 blocklines.append(line)
4169 4169 lastindent = indent
4170 4170
4171 4171 # Flush last block.
4172 4172 if activeaction:
4173 4173 yield activeaction, blocklines
4174 4174
4175 4175
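# Illustrative sketch, not part of the original module: what
# _parsewirelangblocks yields for a tiny script. io.BytesIO stands in for
# ui.fin as used by debugwireproto below; _demoparsewirelang is hypothetical.
def _demoparsewirelang():
    """return the parsed (action, lines) blocks for a small example script"""
    import io

    script = io.BytesIO(
        b'command listkeys\n'
        b'    namespace bookmarks\n'
        b'flush\n'
    )
    # -> [(b'command listkeys', [b'    namespace bookmarks']), (b'flush', [])]
    return list(_parsewirelangblocks(script))

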
4176 4176 @command(
4177 4177 b'debugwireproto',
4178 4178 [
4179 4179 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4180 4180 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4181 4181 (
4182 4182 b'',
4183 4183 b'noreadstderr',
4184 4184 False,
4185 4185 _(b'do not read from stderr of the remote'),
4186 4186 ),
4187 4187 (
4188 4188 b'',
4189 4189 b'nologhandshake',
4190 4190 False,
4191 4191 _(b'do not log I/O related to the peer handshake'),
4192 4192 ),
4193 4193 ]
4194 4194 + cmdutil.remoteopts,
4195 4195 _(b'[PATH]'),
4196 4196 optionalrepo=True,
4197 4197 )
4198 4198 def debugwireproto(ui, repo, path=None, **opts):
4199 4199 """send wire protocol commands to a server
4200 4200
4201 4201 This command can be used to issue wire protocol commands to remote
4202 4202 peers and to debug the raw data being exchanged.
4203 4203
4204 4204 ``--localssh`` will start an SSH server against the current repository
4205 4205 and connect to that. By default, the connection will perform a handshake
4206 4206 and establish an appropriate peer instance.
4207 4207
4208 4208 ``--peer`` can be used to bypass the handshake protocol and construct a
4209 4209 peer instance using the specified class type. Valid values are ``raw``,
4210 4210 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4211 4211 raw data payloads and don't support higher-level command actions.
4212 4212
4213 4213 ``--noreadstderr`` can be used to disable automatic reading from stderr
4214 4214 of the peer (for SSH connections only). Disabling automatic reading of
4215 4215 stderr is useful for making output more deterministic.
4216 4216
4217 4217 Commands are issued via a mini language which is specified via stdin.
4218 4218 The language consists of individual actions to perform. An action is
4219 4219 defined by a block. A block is defined as a line with no leading
4220 4220 space followed by 0 or more lines with leading space. Blocks are
4221 4221 effectively a high-level command with additional metadata.
4222 4222
4223 4223 Lines beginning with ``#`` are ignored.
4224 4224
4225 4225 The following sections denote available actions.
4226 4226
4227 4227 raw
4228 4228 ---
4229 4229
4230 4230 Send raw data to the server.
4231 4231
4232 4232 The block payload contains the raw data to send as one atomic send
4233 4233 operation. The data may not actually be delivered in a single system
4234 4234 call: it depends on the abilities of the transport being used.
4235 4235
4236 4236 Each line in the block is de-indented and concatenated. Then, that
4237 4237 value is evaluated as a Python b'' literal. This allows the use of
4238 4238 backslash escaping, etc.
4239 4239
4240 4240 raw+
4241 4241 ----
4242 4242
4243 4243 Behaves like ``raw`` except flushes output afterwards.
4244 4244
4245 4245 command <X>
4246 4246 -----------
4247 4247
4248 4248 Send a request to run a named command, whose name follows the ``command``
4249 4249 string.
4250 4250
4251 4251 Arguments to the command are defined as lines in this block. The format of
4252 4252 each line is ``<key> <value>``. e.g.::
4253 4253
4254 4254 command listkeys
4255 4255 namespace bookmarks
4256 4256
4257 4257 If the value begins with ``eval:``, it will be interpreted as a Python
4258 4258 literal expression. Otherwise values are interpreted as Python b'' literals.
4259 4259 This allows sending complex types and encoding special byte sequences via
4260 4260 backslash escaping.
4261 4261
4262 4262 The following arguments have special meaning:
4263 4263
4264 4264 ``PUSHFILE``
4265 4265 When defined, the *push* mechanism of the peer will be used instead
4266 4266 of the static request-response mechanism and the content of the
4267 4267 file specified in the value of this argument will be sent as the
4268 4268 command payload.
4269 4269
4270 4270 This can be used to submit a local bundle file to the remote.
4271 4271
4272 4272 batchbegin
4273 4273 ----------
4274 4274
4275 4275 Instruct the peer to begin a batched send.
4276 4276
4277 4277 All ``command`` blocks are queued for execution until the next
4278 4278 ``batchsubmit`` block.
4279 4279
4280 4280 batchsubmit
4281 4281 -----------
4282 4282
4283 4283 Submit previously queued ``command`` blocks as a batch request.
4284 4284
4285 4285 This action MUST be paired with a ``batchbegin`` action.
4286 4286
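For example, a script along these lines (illustrative only; ``heads`` and
``listkeys`` are ordinary wire protocol commands) queues two commands and
submits them as a single batch request::

  batchbegin
  command heads
  command listkeys
      namespace bookmarks
  batchsubmit
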
4287 4287 httprequest <method> <path>
4288 4288 ---------------------------
4289 4289
4290 4290 (HTTP peer only)
4291 4291
4292 4292 Send an HTTP request to the peer.
4293 4293
4294 4294 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4295 4295
4296 4296 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4297 4297 headers to add to the request. e.g. ``Accept: foo``.
4298 4298
4299 4299 The following arguments are special:
4300 4300
4301 4301 ``BODYFILE``
4302 4302 The content of the file defined as the value to this argument will be
4303 4303 transferred verbatim as the HTTP request body.
4304 4304
4305 4305 ``frame <type> <flags> <payload>``
4306 4306 Send a unified protocol frame as part of the request body.
4307 4307
4308 4308 All frames will be collected and sent as the body to the HTTP
4309 4309 request.
4310 4310
4311 4311 close
4312 4312 -----
4313 4313
4314 4314 Close the connection to the server.
4315 4315
4316 4316 flush
4317 4317 -----
4318 4318
4319 4319 Flush data written to the server.
4320 4320
4321 4321 readavailable
4322 4322 -------------
4323 4323
4324 4324 Close the write end of the connection and read all available data from
4325 4325 the server.
4326 4326
4327 4327 If the connection to the server encompasses multiple pipes, we poll both
4328 4328 pipes and read available data.
4329 4329
4330 4330 readline
4331 4331 --------
4332 4332
4333 4333 Read a line of output from the server. If there are multiple output
4334 4334 pipes, reads only the main pipe.
4335 4335
4336 4336 ereadline
4337 4337 ---------
4338 4338
4339 4339 Like ``readline``, but read from the stderr pipe, if available.
4340 4340
4341 4341 read <X>
4342 4342 --------
4343 4343
4344 4344 ``read()`` N bytes from the server's main output pipe.
4345 4345
4346 4346 eread <X>
4347 4347 ---------
4348 4348
4349 4349 ``read()`` N bytes from the server's stderr pipe, if available.
4350 4350
4351 4351 Specifying Unified Frame-Based Protocol Frames
4352 4352 ----------------------------------------------
4353 4353
4354 4354 It is possible to emit *Unified Frame-Based Protocol* frames by using
4355 4355 special syntax.
4356 4356
4357 4357 A frame is composed of a type, flags, and a payload. These can be parsed
4358 4358 from a string of the form:
4359 4359
4360 4360 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4361 4361
4362 4362 ``request-id`` and ``stream-id`` are integers defining the request and
4363 4363 stream identifiers.
4364 4364
4365 4365 ``type`` can be an integer value for the frame type or the string name
4366 4366 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4367 4367 ``command-name``.
4368 4368
4369 4369 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4370 4370 components. Each component (and there can be just one) can be an integer
4371 4371 or a flag name for stream flags or frame flags, respectively. Values are
4372 4372 resolved to integers and then bitwise OR'd together.
4373 4373
4374 4374 ``payload`` represents the raw frame payload. If it begins with
4375 4375 ``cbor:``, the following string is evaluated as Python code and the
4376 4376 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4377 4377 as a Python byte string literal.
4378 4378 """
4379 4379 opts = pycompat.byteskwargs(opts)
4380 4380
4381 4381 if opts[b'localssh'] and not repo:
4382 4382 raise error.Abort(_(b'--localssh requires a repository'))
4383 4383
4384 4384 if opts[b'peer'] and opts[b'peer'] not in (
4385 4385 b'raw',
4386 4386 b'http2',
4387 4387 b'ssh1',
4388 4388 b'ssh2',
4389 4389 ):
4390 4390 raise error.Abort(
4391 4391 _(b'invalid value for --peer'),
4392 4392 hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
4393 4393 )
4394 4394
4395 4395 if path and opts[b'localssh']:
4396 4396 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4397 4397
4398 4398 if ui.interactive():
4399 4399 ui.write(_(b'(waiting for commands on stdin)\n'))
4400 4400
4401 4401 blocks = list(_parsewirelangblocks(ui.fin))
4402 4402
4403 4403 proc = None
4404 4404 stdin = None
4405 4405 stdout = None
4406 4406 stderr = None
4407 4407 opener = None
4408 4408
4409 4409 if opts[b'localssh']:
4410 4410 # We start the SSH server in its own process so there is process
4411 4411 # separation. This prevents a whole class of potential bugs around
4412 4412 # shared state from interfering with server operation.
4413 4413 args = procutil.hgcmd() + [
4414 4414 b'-R',
4415 4415 repo.root,
4416 4416 b'debugserve',
4417 4417 b'--sshstdio',
4418 4418 ]
4419 4419 proc = subprocess.Popen(
4420 4420 pycompat.rapply(procutil.tonativestr, args),
4421 4421 stdin=subprocess.PIPE,
4422 4422 stdout=subprocess.PIPE,
4423 4423 stderr=subprocess.PIPE,
4424 4424 bufsize=0,
4425 4425 )
4426 4426
4427 4427 stdin = proc.stdin
4428 4428 stdout = proc.stdout
4429 4429 stderr = proc.stderr
4430 4430
4431 4431 # We turn the pipes into observers so we can log I/O.
4432 4432 if ui.verbose or opts[b'peer'] == b'raw':
4433 4433 stdin = util.makeloggingfileobject(
4434 4434 ui, proc.stdin, b'i', logdata=True
4435 4435 )
4436 4436 stdout = util.makeloggingfileobject(
4437 4437 ui, proc.stdout, b'o', logdata=True
4438 4438 )
4439 4439 stderr = util.makeloggingfileobject(
4440 4440 ui, proc.stderr, b'e', logdata=True
4441 4441 )
4442 4442
4443 4443 # --localssh also implies the peer connection settings.
4444 4444
4445 4445 url = b'ssh://localserver'
4446 4446 autoreadstderr = not opts[b'noreadstderr']
4447 4447
4448 4448 if opts[b'peer'] == b'ssh1':
4449 4449 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4450 4450 peer = sshpeer.sshv1peer(
4451 4451 ui,
4452 4452 url,
4453 4453 proc,
4454 4454 stdin,
4455 4455 stdout,
4456 4456 stderr,
4457 4457 None,
4458 4458 autoreadstderr=autoreadstderr,
4459 4459 )
4460 4460 elif opts[b'peer'] == b'ssh2':
4461 4461 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4462 4462 peer = sshpeer.sshv2peer(
4463 4463 ui,
4464 4464 url,
4465 4465 proc,
4466 4466 stdin,
4467 4467 stdout,
4468 4468 stderr,
4469 4469 None,
4470 4470 autoreadstderr=autoreadstderr,
4471 4471 )
4472 4472 elif opts[b'peer'] == b'raw':
4473 4473 ui.write(_(b'using raw connection to peer\n'))
4474 4474 peer = None
4475 4475 else:
4476 4476 ui.write(_(b'creating ssh peer from handshake results\n'))
4477 4477 peer = sshpeer.makepeer(
4478 4478 ui,
4479 4479 url,
4480 4480 proc,
4481 4481 stdin,
4482 4482 stdout,
4483 4483 stderr,
4484 4484 autoreadstderr=autoreadstderr,
4485 4485 )
4486 4486
4487 4487 elif path:
4488 4488 # We bypass hg.peer() so we can proxy the sockets.
4489 4489 # TODO consider not doing this because we skip
4490 4490 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4491 4491 u = util.url(path)
4492 4492 if u.scheme != b'http':
4493 4493 raise error.Abort(_(b'only http:// paths are currently supported'))
4494 4494
4495 4495 url, authinfo = u.authinfo()
4496 4496 openerargs = {
4497 4497 'useragent': b'Mercurial debugwireproto',
4498 4498 }
4499 4499
4500 4500 # Turn pipes/sockets into observers so we can log I/O.
4501 4501 if ui.verbose:
4502 4502 openerargs.update(
4503 4503 {
4504 4504 'loggingfh': ui,
4505 4505 'loggingname': b's',
4506 4506 'loggingopts': {
4507 4507 'logdata': True,
4508 4508 'logdataapis': False,
4509 4509 },
4510 4510 }
4511 4511 )
4512 4512
4513 4513 if ui.debugflag:
4514 4514 openerargs['loggingopts']['logdataapis'] = True
4515 4515
4516 4516 # Don't send default headers when in raw mode. This allows us to
4517 4517 # bypass most of the behavior of our URL handling code so we can
4518 4518 # have near complete control over what's sent on the wire.
4519 4519 if opts[b'peer'] == b'raw':
4520 4520 openerargs['sendaccept'] = False
4521 4521
4522 4522 opener = urlmod.opener(ui, authinfo, **openerargs)
4523 4523
4524 4524 if opts[b'peer'] == b'http2':
4525 4525 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4526 4526 # We go through makepeer() because we need an API descriptor for
4527 4527 # the peer instance to be useful.
4528 4528 with ui.configoverride(
4529 4529 {(b'experimental', b'httppeer.advertise-v2'): True}
4530 4530 ):
4531 4531 if opts[b'nologhandshake']:
4532 4532 ui.pushbuffer()
4533 4533
4534 4534 peer = httppeer.makepeer(ui, path, opener=opener)
4535 4535
4536 4536 if opts[b'nologhandshake']:
4537 4537 ui.popbuffer()
4538 4538
4539 4539 if not isinstance(peer, httppeer.httpv2peer):
4540 4540 raise error.Abort(
4541 4541 _(
4542 4542 b'could not instantiate HTTP peer for '
4543 4543 b'wire protocol version 2'
4544 4544 ),
4545 4545 hint=_(
4546 4546 b'the server may not have the feature '
4547 4547 b'enabled or is not allowing this '
4548 4548 b'client version'
4549 4549 ),
4550 4550 )
4551 4551
4552 4552 elif opts[b'peer'] == b'raw':
4553 4553 ui.write(_(b'using raw connection to peer\n'))
4554 4554 peer = None
4555 4555 elif opts[b'peer']:
4556 4556 raise error.Abort(
4557 4557 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4558 4558 )
4559 4559 else:
4560 4560 peer = httppeer.makepeer(ui, path, opener=opener)
4561 4561
4562 4562 # We /could/ populate stdin/stdout with sock.makefile()...
4563 4563 else:
4564 4564 raise error.Abort(_(b'unsupported connection configuration'))
4565 4565
4566 4566 batchedcommands = None
4567 4567
4568 4568 # Now perform actions based on the parsed wire language instructions.
4569 4569 for action, lines in blocks:
4570 4570 if action in (b'raw', b'raw+'):
4571 4571 if not stdin:
4572 4572 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4573 4573
4574 4574 # Concatenate the data together.
4575 4575 data = b''.join(l.lstrip() for l in lines)
4576 4576 data = stringutil.unescapestr(data)
4577 4577 stdin.write(data)
4578 4578
4579 4579 if action == b'raw+':
4580 4580 stdin.flush()
4581 4581 elif action == b'flush':
4582 4582 if not stdin:
4583 4583 raise error.Abort(_(b'cannot call flush on this peer'))
4584 4584 stdin.flush()
4585 4585 elif action.startswith(b'command'):
4586 4586 if not peer:
4587 4587 raise error.Abort(
4588 4588 _(
4589 4589 b'cannot send commands unless peer instance '
4590 4590 b'is available'
4591 4591 )
4592 4592 )
4593 4593
4594 4594 command = action.split(b' ', 1)[1]
4595 4595
4596 4596 args = {}
4597 4597 for line in lines:
4598 4598 # We need to allow empty values.
4599 4599 fields = line.lstrip().split(b' ', 1)
4600 4600 if len(fields) == 1:
4601 4601 key = fields[0]
4602 4602 value = b''
4603 4603 else:
4604 4604 key, value = fields
4605 4605
4606 4606 if value.startswith(b'eval:'):
4607 4607 value = stringutil.evalpythonliteral(value[5:])
4608 4608 else:
4609 4609 value = stringutil.unescapestr(value)
4610 4610
4611 4611 args[key] = value
4612 4612
4613 4613 if batchedcommands is not None:
4614 4614 batchedcommands.append((command, args))
4615 4615 continue
4616 4616
4617 4617 ui.status(_(b'sending %s command\n') % command)
4618 4618
4619 4619 if b'PUSHFILE' in args:
4620 4620 with open(args[b'PUSHFILE'], 'rb') as fh:
4621 4621 del args[b'PUSHFILE']
4622 4622 res, output = peer._callpush(
4623 4623 command, fh, **pycompat.strkwargs(args)
4624 4624 )
4625 4625 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4626 4626 ui.status(
4627 4627 _(b'remote output: %s\n') % stringutil.escapestr(output)
4628 4628 )
4629 4629 else:
4630 4630 with peer.commandexecutor() as e:
4631 4631 res = e.callcommand(command, args).result()
4632 4632
4633 4633 if isinstance(res, wireprotov2peer.commandresponse):
4634 4634 val = res.objects()
4635 4635 ui.status(
4636 4636 _(b'response: %s\n')
4637 4637 % stringutil.pprint(val, bprefix=True, indent=2)
4638 4638 )
4639 4639 else:
4640 4640 ui.status(
4641 4641 _(b'response: %s\n')
4642 4642 % stringutil.pprint(res, bprefix=True, indent=2)
4643 4643 )
4644 4644
4645 4645 elif action == b'batchbegin':
4646 4646 if batchedcommands is not None:
4647 4647 raise error.Abort(_(b'nested batchbegin not allowed'))
4648 4648
4649 4649 batchedcommands = []
4650 4650 elif action == b'batchsubmit':
4651 4651 # There is a batching API we could go through. But it would be
4652 4652 # difficult to normalize requests into function calls. It is easier
4653 4653 # to bypass this layer and normalize to commands + args.
4654 4654 ui.status(
4655 4655 _(b'sending batch with %d sub-commands\n')
4656 4656 % len(batchedcommands)
4657 4657 )
4658 4658 assert peer is not None
4659 4659 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4660 4660 ui.status(
4661 4661 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4662 4662 )
4663 4663
4664 4664 batchedcommands = None
4665 4665
4666 4666 elif action.startswith(b'httprequest '):
4667 4667 if not opener:
4668 4668 raise error.Abort(
4669 4669 _(b'cannot use httprequest without an HTTP peer')
4670 4670 )
4671 4671
4672 4672 request = action.split(b' ', 2)
4673 4673 if len(request) != 3:
4674 4674 raise error.Abort(
4675 4675 _(
4676 4676 b'invalid httprequest: expected format is '
4677 4677 b'"httprequest <method> <path>'
4678 4678 )
4679 4679 )
4680 4680
4681 4681 method, httppath = request[1:]
4682 4682 headers = {}
4683 4683 body = None
4684 4684 frames = []
4685 4685 for line in lines:
4686 4686 line = line.lstrip()
4687 4687 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4688 4688 if m:
4689 4689 # Headers need to use native strings.
4690 4690 key = pycompat.strurl(m.group(1))
4691 4691 value = pycompat.strurl(m.group(2))
4692 4692 headers[key] = value
4693 4693 continue
4694 4694
4695 4695 if line.startswith(b'BODYFILE '):
4696 4696 with open(line.split(b' ', 1)[1], b'rb') as fh:
4697 4697 body = fh.read()
4698 4698 elif line.startswith(b'frame '):
4699 4699 frame = wireprotoframing.makeframefromhumanstring(
4700 4700 line[len(b'frame ') :]
4701 4701 )
4702 4702
4703 4703 frames.append(frame)
4704 4704 else:
4705 4705 raise error.Abort(
4706 4706 _(b'unknown argument to httprequest: %s') % line
4707 4707 )
4708 4708
4709 4709 url = path + httppath
4710 4710
4711 4711 if frames:
4712 4712 body = b''.join(bytes(f) for f in frames)
4713 4713
4714 4714 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4715 4715
4716 4716 # urllib.Request insists on using has_data() as a proxy for
4717 4717 # determining the request method. Override that to use our
4718 4718 # explicitly requested method.
4719 4719 req.get_method = lambda: pycompat.sysstr(method)
4720 4720
4721 4721 try:
4722 4722 res = opener.open(req)
4723 4723 body = res.read()
4724 4724 except util.urlerr.urlerror as e:
4725 4725 # read() method must be called, but only exists in Python 2
4726 4726 getattr(e, 'read', lambda: None)()
4727 4727 continue
4728 4728
4729 4729 ct = res.headers.get('Content-Type')
4730 4730 if ct == 'application/mercurial-cbor':
4731 4731 ui.write(
4732 4732 _(b'cbor> %s\n')
4733 4733 % stringutil.pprint(
4734 4734 cborutil.decodeall(body), bprefix=True, indent=2
4735 4735 )
4736 4736 )
4737 4737
4738 4738 elif action == b'close':
4739 4739 assert peer is not None
4740 4740 peer.close()
4741 4741 elif action == b'readavailable':
4742 4742 if not stdout or not stderr:
4743 4743 raise error.Abort(
4744 4744 _(b'readavailable not available on this peer')
4745 4745 )
4746 4746
4747 4747 stdin.close()
4748 4748 stdout.read()
4749 4749 stderr.read()
4750 4750
4751 4751 elif action == b'readline':
4752 4752 if not stdout:
4753 4753 raise error.Abort(_(b'readline not available on this peer'))
4754 4754 stdout.readline()
4755 4755 elif action == b'ereadline':
4756 4756 if not stderr:
4757 4757 raise error.Abort(_(b'ereadline not available on this peer'))
4758 4758 stderr.readline()
4759 4759 elif action.startswith(b'read '):
4760 4760 count = int(action.split(b' ', 1)[1])
4761 4761 if not stdout:
4762 4762 raise error.Abort(_(b'read not available on this peer'))
4763 4763 stdout.read(count)
4764 4764 elif action.startswith(b'eread '):
4765 4765 count = int(action.split(b' ', 1)[1])
4766 4766 if not stderr:
4767 4767 raise error.Abort(_(b'eread not available on this peer'))
4768 4768 stderr.read(count)
4769 4769 else:
4770 4770 raise error.Abort(_(b'unknown action: %s') % action)
4771 4771
4772 4772 if batchedcommands is not None:
4773 4773 raise error.Abort(_(b'unclosed "batchbegin" request'))
4774 4774
4775 4775 if peer:
4776 4776 peer.close()
4777 4777
4778 4778 if proc:
4779 4779 proc.kill()