##// END OF EJS Templates
dirstate-item: use the v1_serialization method in debugstate...
marmoute -
r48368:85ce6ed5 default
parent child Browse files
Show More
@@ -1,4827 +1,4833 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import contextlib
13 13 import difflib
14 14 import errno
15 15 import glob
16 16 import operator
17 17 import os
18 18 import platform
19 19 import random
20 20 import re
21 21 import socket
22 22 import ssl
23 23 import stat
24 24 import string
25 25 import subprocess
26 26 import sys
27 27 import time
28 28
29 29 from .i18n import _
30 30 from .node import (
31 31 bin,
32 32 hex,
33 33 nullrev,
34 34 short,
35 35 )
36 36 from .pycompat import (
37 37 getattr,
38 38 open,
39 39 )
40 40 from . import (
41 41 bundle2,
42 42 bundlerepo,
43 43 changegroup,
44 44 cmdutil,
45 45 color,
46 46 context,
47 47 copies,
48 48 dagparser,
49 49 encoding,
50 50 error,
51 51 exchange,
52 52 extensions,
53 53 filemerge,
54 54 filesetlang,
55 55 formatter,
56 56 hg,
57 57 httppeer,
58 58 localrepo,
59 59 lock as lockmod,
60 60 logcmdutil,
61 61 mergestate as mergestatemod,
62 62 metadata,
63 63 obsolete,
64 64 obsutil,
65 65 pathutil,
66 66 phases,
67 67 policy,
68 68 pvec,
69 69 pycompat,
70 70 registrar,
71 71 repair,
72 72 repoview,
73 73 revlog,
74 74 revset,
75 75 revsetlang,
76 76 scmutil,
77 77 setdiscovery,
78 78 simplemerge,
79 79 sshpeer,
80 80 sslutil,
81 81 streamclone,
82 82 strip,
83 83 tags as tagsmod,
84 84 templater,
85 85 treediscovery,
86 86 upgrade,
87 87 url as urlmod,
88 88 util,
89 89 vfs as vfsmod,
90 90 wireprotoframing,
91 91 wireprotoserver,
92 92 wireprotov2peer,
93 93 )
94 94 from .interfaces import repository
95 95 from .utils import (
96 96 cborutil,
97 97 compression,
98 98 dateutil,
99 99 procutil,
100 100 stringutil,
101 101 urlutil,
102 102 )
103 103
104 104 from .revlogutils import (
105 105 deltas as deltautil,
106 106 nodemap,
107 107 sidedata,
108 108 )
109 109
# Convenience alias so callers can release a list of locks without
# re-importing lockmod.
release = lockmod.release

# Command table for all debug* commands in this module.  It is seeded
# with the commands registered by the strip module so both share one
# table, then `command` registers new entries into it.
table = {}
table.update(strip.command._table)
command = registrar.command(table)
115 115
116 116
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit index file was given: open it as a standalone revlog
        # rooted at the current working directory.
        index, rev1, rev2 = args
        opener = vfsmod.vfs(encoding.getcwd(), audit=False)
        rl = revlog.revlog(opener, index)
        lookup = rl.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the current repo.
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    anc = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (rl.rev(anc), hex(anc)))
136 136
137 137
@command(b'debugantivirusrunning', [])
def debugantivirusrunning(ui, repo):
    """attempt to trigger an antivirus scanner to see if one is active"""
    # Write the standard EICAR test pattern into the repo's cache area;
    # any on-access antivirus engine is expected to react to it.
    with repo.cachevfs.open('eicar-test-file.com', b'wb') as f:
        f.write(
            util.b85decode(
                # This is a base85-armored version of the EICAR test file. See
                # https://en.wikipedia.org/wiki/EICAR_test_file for details.
                b'ST#=}P$fV?P+K%yP+C|uG$>GBDK|qyDK~v2MM*<JQY}+dK~6+LQba95P'
                b'E<)&Nm5l)EmTEQR4qnHOhq9iNGnJx'
            )
        )
    # Give an AV engine time to scan the file.
    time.sleep(2)
    # Best-effort cleanup; an AV engine may already have removed the file.
    util.unlink(repo.cachevfs.join('eicar-test-file.com'))
153 153
154 154
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # hg.openpath handles both local paths and URLs; the resulting
    # unbundler applies itself directly to the repository.
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
161 161
162 162
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # This command only makes sense on a pristine repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # First pass over the DAG text: count node events so the progress
    # bar has a total.
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # Second pass: actually create the commits inside one transaction.
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # id of the most recently committed node, -1 before any
        atbranch = b'default'  # branch applied to subsequent nodes
        nodeids = []  # maps DAG id -> commit node, filled in order
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # Maintain a single file "mf" whose lines can be
                    # three-way merged across the DAG.
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        ml = initialmergedlines
                    # Tag this revision's slice of lines with its id.
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every revision.
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    # One brand-new file per revision; on merges, carry
                    # over the "nf*" files from the second parent too.
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith(b"nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                # Resolve DAG parent ids to the nodes committed so far.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # Local tag event: remember it, written out at the end.
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
338 338
339 339
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the deltas of changegroup ``gen``, one line per delta

    With ``all`` set, every field of each delta is printed for the
    changelog, the manifest and each filelog; otherwise only the node
    hashes of the changelog deltas are shown.  ``indent`` prefixes each
    output line (used when nested inside bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # Print a section header, then one line per delta in the
            # current changegroup section.
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        # Filelog sections follow until an empty header is returned.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags, sidedata = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
379 379
380 380
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown on-disk format: report the version instead of aborting.
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # Reuse the debugobsolete formatter so output matches that command.
        fm = ui.formatter(b'debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
403 403
404 404
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    pad = b' ' * indent
    decoded = phases.binarydecode(data)
    for phase in phases.allphases:
        name = phases.phasenames[phase]
        for head in decoded[phase]:
            ui.write(pad)
            ui.write(b'%s %s\n' % (hex(head), name))
413 413
414 414
def _quasirepr(thing):
    """Return a stable bytes repr of *thing* (mapping keys sorted)."""
    mappingtypes = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, mappingtypes):
        return pycompat.bytestr(repr(thing))
    pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return b'{%s}' % b', '.join(pairs)
421 421
422 422
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional filter: only show parts whose type was requested.
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # For known part types, recurse into the payload unless --quiet.
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
445 445
446 446
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundle specification string.
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        # Dispatch on the detected bundle format.
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
469 469
470 470
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    try:
        caps = peer.capabilities()
        ui.writenoi18n(b'Main capabilities:\n')
        for c in sorted(caps):
            ui.write(b'  %s\n' % c)
        # bundle2 capabilities are nested: key -> list of values.
        b2caps = bundle2.bundle2caps(peer)
        if b2caps:
            ui.writenoi18n(b'Bundle2 capabilities:\n')
            for key, values in sorted(pycompat.iteritems(b2caps)):
                ui.write(b'  %s\n' % key)
                for v in values:
                    ui.write(b'    %s\n' % v)
    finally:
        # Always close the peer connection, even on error.
        peer.close()
490 490
491 491
@command(
    b'debugchangedfiles',
    [
        (
            b'',
            b'compute',
            False,
            b"compute information instead of reading it from storage",
        ),
    ],
    b'REV',
)
def debugchangedfiles(ui, repo, rev, **opts):
    """list the stored files changes for a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    files = None

    if opts['compute']:
        # Recompute the file-change information from the context.
        files = metadata.compute_all_files_changes(ctx)
    else:
        # Read the precomputed information from changelog sidedata.
        sd = repo.changelog.sidedata(ctx.rev())
        files_block = sd.get(sidedata.SD_FILES)
        if files_block is not None:
            files = metadata.decode_files_sidedata(sd)
    if files is not None:
        for f in sorted(files.touched):
            # Classify each touched file; categories are checked in
            # priority order, falling back to plain "touched".
            if f in files.added:
                action = b"added"
            elif f in files.removed:
                action = b"removed"
            elif f in files.merged:
                action = b"merged"
            elif f in files.salvaged:
                action = b"salvaged"
            else:
                action = b"touched"

            # Note which parent (if any) the file was copied from.
            copy_parent = b""
            copy_source = b""
            if f in files.copied_from_p1:
                copy_parent = b"p1"
                copy_source = files.copied_from_p1[f]
            elif f in files.copied_from_p2:
                copy_parent = b"p2"
                copy_source = files.copied_from_p2[f]

            data = (action, copy_parent, f, copy_source)
            template = b"%-8s %2s: %s, %s;\n"
            ui.write(template % data)
541 541
542 542
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Check every dirstate entry against the parent manifests.
    # States: n=normal, a=added, r=removed, m=merged.
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    # Reverse check: every file of manifest1 must be tracked.
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
571 571
572 572
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    mode = stringutil.pprint(ui._colormode)
    ui.writenoi18n(b'color mode: %s\n' % mode)
    # --style shows configured styles; default shows raw colors/effects.
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
585 585
586 586
def _debugdisplaycolor(ui):
    """print every known color/effect label, rendered in that effect"""
    ui = ui.copy()
    ui._styles.clear()
    ui._styles.update(
        (effect, effect) for effect in color._activeeffects(ui)
    )
    if ui._terminfoparams:
        for key, value in ui.configitems(b'color'):
            if key.startswith(b'color.'):
                ui._styles[key] = key[len(b'color.') :]
            elif key.startswith(b'terminfo.'):
                ui._styles[key] = key[len(b'terminfo.') :]
    ui.write(_(b'available colors:\n'))

    # sort labels containing '_' after the others so that '_background'
    # entries end up grouped together
    def sortkey(item):
        return (b'_' in item[0], item[0], item[1])

    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(b'%s\n' % colorname, label=label)
603 603
604 604
def _debugdisplaystyle(ui):
    """print each configured style and the effects it maps to"""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # width of the widest label, used to align the effect column
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            ui.write(b' ' * (max(0, width - len(label))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(b', '.join(rendered))
        ui.write(b'\n')
618 618
619 619
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # Stream bundles copy revlogs wholesale, so secret changesets
        # cannot be filtered out; warn instead of silently leaking.
        ui.warn(
            _(
                b'(warning: stream clone bundle will contain secret '
                b'revisions)\n'
            )
        )

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
641 641
642 642
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # Explicit revlog index: emit its DAG, labeling requested revs.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = {int(r) for r in revs}

        def events():
            # Yield ('n', (rev, parents)) node events and ('l', ...)
            # label events for the revs the user asked to highlight.
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        # No index file: walk the repository changelog instead.
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # Map each tagged rev to its list of tag names.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"
            for r in cl:
                if branches:
                    # Emit a branch-change event when the branch differs
                    # from the previous revision's.
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
        ui.write(b"\n")
712 712
713 713
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        # With -c/-m/--dir the first positional argument is the revision,
        # not a file name.
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        # rawdata: no flag processing, bytes exactly as stored.
        ui.write(r.rawdata(r.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
729 729
730 730
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # With -e, also try the extended set of accepted date formats.
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matcher(parsed[0]))
749 749
750 750
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r._generaldelta
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Gather per-revision delta statistics from the revlog index
        # entry: e[1]=compressed size, e[2]=uncompressed size,
        # e[3]=delta base rev, e[5]/e[6]=parent revs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # Without generaldelta, deltas are always against the
            # previous revision unless this rev is a full snapshot.
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b'    rev  chain# chainlen     prev   delta       '
        b'size    rawsize  chainsize     ratio   lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b'   readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        # Chains are numbered in order of first appearance of their base.
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        # Guard against division by zero for empty revisions/chains.
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # Simulate a sparse read of the chain and report how much
            # data would actually be read from disk.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
931 931
932 932
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
        (b'', b'dirs', False, _(b'display directories')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is deprecated; when given it overrides --dates.
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        # sort by mtime, then by filename, reading the timestamp through
        # the dirstate item's v1-serialization accessor
        keyfunc = lambda x: (
            x[1].v1_mtime(),
            x[0],
        )
    else:
        keyfunc = None  # sort by filename
    entries = list(pycompat.iteritems(repo.dirstate))
    if opts['dirs']:
        entries.extend(repo.dirstate.directories())
    entries.sort(key=keyfunc)
    for file_, ent in entries:
        if ent.v1_mtime() == -1:
            timestr = b'unset               '
        elif nodates:
            timestr = b'set                 '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent.v1_mtime())
            )
            timestr = encoding.strtolocal(timestr)
        # 0o20000 is the S_IFLNK bit: render symlinks as 'lnk'.
        if ent.mode & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent.v1_mode() & 0o777 & ~util.umask)
        ui.write(
            b"%c %s %10d %s%s\n"
            % (ent.v1_state(), mode, ent.v1_size(), timestr, file_)
        )
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
981 987
982 988
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
        (
            b'',
            b'local-as-revs',
            b"",
            # typo fix: "treat local has having" -> "treat local as having"
            b'treat local as having these revisions only',
        ),
        (
            b'',
            b'remote-as-revs',
            b"",
            # typo fix: duplicated "these these"
            b'use local as remote, with only these revisions',
        ),
    ]
    + cmdutil.remoteopts
    + cmdutil.formatteropts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation

    The local peer can be "replaced" by a subset of the local repository by
    using the `--local-as-revs` flag. In the same way, the usual `remote` peer
    can be "replaced" by a subset of the local repository using the
    `--remote-as-revs` flag. This is useful to efficiently debug pathological
    discovery situation.

    The following developer oriented config are relevant for people playing with this command:

    * devel.discovery.exchange-heads=True

      If False, the discovery will not start with
      remote head fetching and local head querying.

    * devel.discovery.grow-sample=True

      If False, the sample size used in set discovery will not be increased
      through the process

    * devel.discovery.grow-sample.dynamic=True

      When discovery.grow-sample.dynamic is True, the default, the sample size is
      adapted to the shape of the undecided set (it is set to the max of:
      <target-size>, len(roots(undecided)), len(heads(undecided)

    * devel.discovery.grow-sample.rate=1.05

      the rate at which the sample grow

    * devel.discovery.randomize=True

      If False, random sampling during discovery is deterministic. It is meant
      for integration tests.

    * devel.discovery.sample-size=200

      Control the initial size of the discovery sample

    * devel.discovery.sample-size.initial=100

      Control the initial size of the discovery for initial change
    """
    opts = pycompat.byteskwargs(opts)
    unfi = repo.unfiltered()

    # setup potential extra filtering
    local_revs = opts[b"local_as_revs"]
    remote_revs = opts[b"remote_as_revs"]

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    if not remote_revs:
        # talk to an actual peer (possibly the local repo over a peer API)
        remoteurl, branches = urlutil.get_unique_pull_path(
            b'debugdiscovery', repo, ui, remoteurl
        )
        remote = hg.peer(repo, opts, remoteurl)
        ui.status(_(b'comparing with %s\n') % urlutil.hidepassword(remoteurl))
    else:
        # impersonate the remote side with a filtered view of the local repo
        branches = (None, [])
        remote_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % remote_revs]
        )
        remote_filtered_revs = frozenset(remote_filtered_revs)

        def remote_func(x):
            return remote_filtered_revs

        repoview.filtertable[b'debug-discovery-remote-filter'] = remote_func

        remote = repo.peer()
        remote._repo = remote._repo.filtered(b'debug-discovery-remote-filter')

    if local_revs:
        # restrict the local side the same way
        local_filtered_revs = scmutil.revrange(
            unfi, [b"not (::(%s))" % local_revs]
        )
        local_filtered_revs = frozenset(local_filtered_revs)

        def local_func(x):
            return local_filtered_revs

        repoview.filtertable[b'debug-discovery-local-filter'] = local_func
        repo = repo.filtered(b'debug-discovery-local-filter')

    data = {}
    if opts.get(b'old'):
        # legacy tree-walking discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True, audit=data
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:
        # modern set-based discovery

        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes, audit=data
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']

    fm = ui.formatter(b'debugdiscovery', opts)
    if fm.strict_format:
        # machine-readable output: keep the discovery chatter out of the
        # stream and report it as a data field instead

        @contextlib.contextmanager
        def may_capture_output():
            ui.pushbuffer()
            yield
            data[b'output'] = ui.popbuffer()

    else:
        may_capture_output = util.nullcontextmanager
    with may_capture_output():
        with util.timedcm('debug-discovery') as t:
            common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    heads_common = set(common)
    heads_remote = set(hds)
    heads_local = set(repo.heads())
    # note: they cannot be a local or remote head that is in common and not
    # itself a head of common.
    heads_common_local = heads_common & heads_local
    heads_common_remote = heads_common & heads_remote
    heads_common_both = heads_common & heads_remote & heads_local

    all = repo.revs(b'all()')
    common = repo.revs(b'::%ln', common)
    roots_common = repo.revs(b'roots(::%ld)', common)
    missing = repo.revs(b'not ::%ld', common)
    heads_missing = repo.revs(b'heads(%ld)', missing)
    roots_missing = repo.revs(b'roots(%ld)', missing)
    assert len(common) + len(missing) == len(all)

    initial_undecided = repo.revs(
        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
    )
    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
    common_initial_undecided = initial_undecided & common
    missing_initial_undecided = initial_undecided & missing

    data[b'elapsed'] = t.elapsed
    data[b'nb-common-heads'] = len(heads_common)
    data[b'nb-common-heads-local'] = len(heads_common_local)
    data[b'nb-common-heads-remote'] = len(heads_common_remote)
    data[b'nb-common-heads-both'] = len(heads_common_both)
    data[b'nb-common-roots'] = len(roots_common)
    data[b'nb-head-local'] = len(heads_local)
    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
    data[b'nb-head-remote'] = len(heads_remote)
    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
        heads_common_remote
    )
    data[b'nb-revs'] = len(all)
    data[b'nb-revs-common'] = len(common)
    data[b'nb-revs-missing'] = len(missing)
    data[b'nb-missing-heads'] = len(heads_missing)
    data[b'nb-missing-roots'] = len(roots_missing)
    data[b'nb-ini_und'] = len(initial_undecided)
    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
    data[b'nb-ini_und-common'] = len(common_initial_undecided)
    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)

    fm.startitem()
    fm.data(**pycompat.strkwargs(data))
    # display discovery summary
    fm.plain(b"elapsed time:  %(elapsed)f seconds\n" % data)
    fm.plain(b"round-trips:           %(total-roundtrips)9d\n" % data)
    fm.plain(b"heads summary:\n")
    fm.plain(b"  total common heads:  %(nb-common-heads)9d\n" % data)
    fm.plain(b"    also local heads:  %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    also remote heads: %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    both:              %(nb-common-heads-both)9d\n" % data)
    fm.plain(b"  local heads:         %(nb-head-local)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-local)9d\n" % data)
    fm.plain(b"    missing:           %(nb-head-local-missing)9d\n" % data)
    fm.plain(b"  remote heads:        %(nb-head-remote)9d\n" % data)
    fm.plain(b"    common:            %(nb-common-heads-remote)9d\n" % data)
    fm.plain(b"    unknown:           %(nb-head-remote-unknown)9d\n" % data)
    fm.plain(b"local changesets:      %(nb-revs)9d\n" % data)
    fm.plain(b"  common:              %(nb-revs-common)9d\n" % data)
    fm.plain(b"    heads:             %(nb-common-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-common-roots)9d\n" % data)
    fm.plain(b"  missing:             %(nb-revs-missing)9d\n" % data)
    fm.plain(b"    heads:             %(nb-missing-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-missing-roots)9d\n" % data)
    fm.plain(b"  first undecided set: %(nb-ini_und)9d\n" % data)
    fm.plain(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
    fm.plain(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
    fm.plain(b"    common:            %(nb-ini_und-common)9d\n" % data)
    fm.plain(b"    missing:           %(nb-ini_und-missing)9d\n" % data)

    if ui.verbose:
        fm.plain(
            b"common heads: %s\n"
            % b" ".join(sorted(short(n) for n in heads_common))
        )
    fm.end()
1236 1242
1237 1243
_chunksize = 4 << 10


@command(
    b'debugdownload',
    [
        (b'o', b'output', b'', _(b'path')),
    ],
    optionalrepo=True,
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config"""
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        try:
            # copy in fixed-size chunks so large downloads stay bounded
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            # fix: the source handle was previously never closed (leak)
            fh.close()
    finally:
        if output:
            dest.close()
1263 1269
1264 1270
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    loaded = extensions.extensions(ui)
    mercurial_version = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    for extname, extmod in sorted(loaded, key=operator.itemgetter(0)):
        internal = extensions.ismoduleinternal(extmod)

        # where the extension lives on disk (or the frozen executable)
        location = None
        if util.safehasattr(extmod, '__file__'):
            location = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            location = pycompat.sysexecutable

        if internal:
            tested_versions = []  # never expose magic string to users
        else:
            tested_versions = getattr(extmod, 'testedwith', b'').split()
        buglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
        # annotate the name with compatibility info in default verbosity
        if internal or mercurial_version in tested_versions:
            fm.plain(b'\n')
        elif not tested_versions:
            fm.plain(_(b' (untested!)\n'))
        else:
            fm.plain(b' (%s!)\n' % tested_versions[-1])

        fm.condwrite(
            ui.verbose and location,
            b'source',
            _(b'  location: %s\n'),
            location or b"",
        )

        if ui.verbose:
            fm.plain(_(b'  bundled: %s\n') % [b'no', b'yes'][internal])
        fm.data(bundled=internal)

        fm.condwrite(
            ui.verbose and tested_versions,
            b'testedwith',
            _(b'  tested with: %s\n'),
            fm.formatlist(tested_versions, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and buglink,
            b'buglink',
            _(b'  bug reporting: %s\n'),
            buglink or b"",
        )

    fm.end()
1326 1332
1327 1333
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols  # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # successive transformations applied to the parsed expression
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    known_stages = {stagename for stagename, _func in stages}

    # which intermediate trees should be printed
    wanted = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        wanted.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        wanted.update(known_stages)
    else:
        for stagename in opts[b'show_stage']:
            if stagename not in known_stages:
                raise error.Abort(_(b'invalid stage name: %s') % stagename)
        wanted.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for stagename, transform in stages:
        tree = transform(tree)
        if stagename in wanted:
            if opts[b'show_stage'] or stagename != b'parsed':
                ui.write(b"* %s:\n" % stagename)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    paths = set()
    if opts[b'all_files']:
        for rev in repo:
            revctx = repo[rev]
            paths.update(revctx.files())
            paths.update(revctx.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        paths.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        paths.update(wctx.substate)
    else:
        paths.update(ctx.files())
        paths.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    for f in sorted(paths):
        if m(f):
            ui.write(b"%s\n" % f)
1423 1429
1424 1430
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: widest variant name, at least the header
    namewidth = max(len(fv.name) for fv in upgrade.allformatvariant)
    namewidth = max(len(b'format-variant'), namewidth)

    def fieldformat(name):
        return b'%s:' + (b' ' * (namewidth - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def render(value):
            # bytes pass through; booleans become yes/no for humans
            if util.safehasattr(value, b'startswith'):
                return value
            return b'yes' if value else b'no'

    else:
        render = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (namewidth - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for variant in upgrade.allformatvariant:
        fm.startitem()
        actual = variant.fromrepo(repo)
        configured = variant.fromconfig(repo)

        # highlight mismatches between repo state, config, and defaults
        if actual != configured:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif actual != variant.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(
            b'name', fieldformat(variant.name), variant.name, label=namelabel
        )
        fm.write(b'repo', b' %3s', render(actual), label=repolabel)
        if variant.default != configured:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            render(configured),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            render(variant.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1495 1501
1496 1502
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result the way this command always has
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    casesensitive = b'(unknown)'
    try:
        # probe by creating a temp file and checking its name's case handling
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1519 1525
1520 1526
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")
    gbargs = {}
    if common:
        gbargs['common'] = [bin(s) for s in common]
    if head:
        gbargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    gbargs['bundlecaps'] = None
    bundle = peer.getbundle(b'debug', **gbargs)

    # map the user-facing compression name to the on-disk bundle type
    requested = opts.get(b'type', b'bzip2').lower()
    typemap = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    bundletype = typemap.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1567 1573
1568 1574
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        normed = util.normpath(f)
        matched = None
        ruleinfo = None
        if normed != b'.':
            if ignore(normed):
                matched = normed
                ruleinfo = repo.dirstate._ignorefileandline(normed)
            else:
                # not ignored directly; an ignored ancestor directory counts
                for parent in pathutil.finddirs(normed):
                    if ignore(parent):
                        matched = parent
                        ruleinfo = repo.dirstate._ignorefileandline(parent)
                        break
        if not matched:
            ui.write(_(b"%s is not ignored\n") % uipathfn(f))
            continue
        if matched == normed:
            ui.write(_(b"%s is ignored\n") % uipathfn(f))
        else:
            ui.write(
                _(
                    b"%s is ignored because of "
                    b"containing directory %s\n"
                )
                % (uipathfn(f), matched)
            )
        ignorefile, lineno, line = ruleinfo
        ui.write(
            _(b"(ignore rule in %s, line %d: '%s')\n")
            % (ignorefile, lineno, line)
        )
1617 1623
1618 1624
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # full hashes in --debug mode, abbreviated ones otherwise
    fmtnode = hex if ui.debugflag else short

    # size the node columns from the first revision, if there is one
    idwidth = 12
    for rev in store:
        idwidth = len(fmtnode(store.node(rev)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b'   rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idwidth), b'p1'.ljust(idwidth))
    )

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', fmtnode(node))
        fm.write(b'p1', b'%s ', fmtnode(p1))
        fm.write(b'p2', b'%s', fmtnode(p2))
        fm.plain(b'\n')

    fm.end()
1658 1664
1659 1665
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)
        # one edge per parent; the null second parent is omitted
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != repo.nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1678 1684
1679 1685
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # force the index to be loaded/parsed before inspecting it
    repo.changelog.shortest(repo.nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    for name, value in sorted(index.stats().items()):
        ui.write(b'%s: %d\n' % (name, value))
1689 1695
1690 1696
1691 1697 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1692 1698 def debuginstall(ui, **opts):
1693 1699 """test Mercurial installation
1694 1700
1695 1701 Returns 0 on success.
1696 1702 """
1697 1703 opts = pycompat.byteskwargs(opts)
1698 1704
1699 1705 problems = 0
1700 1706
1701 1707 fm = ui.formatter(b'debuginstall', opts)
1702 1708 fm.startitem()
1703 1709
1704 1710 # encoding might be unknown or wrong. don't translate these messages.
1705 1711 fm.write(b'encoding', b"checking encoding (%s)...\n", encoding.encoding)
1706 1712 err = None
1707 1713 try:
1708 1714 codecs.lookup(pycompat.sysstr(encoding.encoding))
1709 1715 except LookupError as inst:
1710 1716 err = stringutil.forcebytestr(inst)
1711 1717 problems += 1
1712 1718 fm.condwrite(
1713 1719 err,
1714 1720 b'encodingerror',
1715 1721 b" %s\n (check that your locale is properly set)\n",
1716 1722 err,
1717 1723 )
1718 1724
1719 1725 # Python
1720 1726 pythonlib = None
1721 1727 if util.safehasattr(os, '__file__'):
1722 1728 pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
1723 1729 elif getattr(sys, 'oxidized', False):
1724 1730 pythonlib = pycompat.sysexecutable
1725 1731
1726 1732 fm.write(
1727 1733 b'pythonexe',
1728 1734 _(b"checking Python executable (%s)\n"),
1729 1735 pycompat.sysexecutable or _(b"unknown"),
1730 1736 )
1731 1737 fm.write(
1732 1738 b'pythonimplementation',
1733 1739 _(b"checking Python implementation (%s)\n"),
1734 1740 pycompat.sysbytes(platform.python_implementation()),
1735 1741 )
1736 1742 fm.write(
1737 1743 b'pythonver',
1738 1744 _(b"checking Python version (%s)\n"),
1739 1745 (b"%d.%d.%d" % sys.version_info[:3]),
1740 1746 )
1741 1747 fm.write(
1742 1748 b'pythonlib',
1743 1749 _(b"checking Python lib (%s)...\n"),
1744 1750 pythonlib or _(b"unknown"),
1745 1751 )
1746 1752
1747 1753 try:
1748 1754 from . import rustext # pytype: disable=import-error
1749 1755
1750 1756 rustext.__doc__ # trigger lazy import
1751 1757 except ImportError:
1752 1758 rustext = None
1753 1759
1754 1760 security = set(sslutil.supportedprotocols)
1755 1761 if sslutil.hassni:
1756 1762 security.add(b'sni')
1757 1763
1758 1764 fm.write(
1759 1765 b'pythonsecurity',
1760 1766 _(b"checking Python security support (%s)\n"),
1761 1767 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1762 1768 )
1763 1769
1764 1770 # These are warnings, not errors. So don't increment problem count. This
1765 1771 # may change in the future.
1766 1772 if b'tls1.2' not in security:
1767 1773 fm.plain(
1768 1774 _(
1769 1775 b' TLS 1.2 not supported by Python install; '
1770 1776 b'network connections lack modern security\n'
1771 1777 )
1772 1778 )
1773 1779 if b'sni' not in security:
1774 1780 fm.plain(
1775 1781 _(
1776 1782 b' SNI not supported by Python install; may have '
1777 1783 b'connectivity issues with some servers\n'
1778 1784 )
1779 1785 )
1780 1786
1781 1787 fm.plain(
1782 1788 _(
1783 1789 b"checking Rust extensions (%s)\n"
1784 1790 % (b'missing' if rustext is None else b'installed')
1785 1791 ),
1786 1792 )
1787 1793
1788 1794 # TODO print CA cert info
1789 1795
1790 1796 # hg version
1791 1797 hgver = util.version()
1792 1798 fm.write(
1793 1799 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1794 1800 )
1795 1801 fm.write(
1796 1802 b'hgverextra',
1797 1803 _(b"checking Mercurial custom build (%s)\n"),
1798 1804 b'+'.join(hgver.split(b'+')[1:]),
1799 1805 )
1800 1806
1801 1807 # compiled modules
1802 1808 hgmodules = None
1803 1809 if util.safehasattr(sys.modules[__name__], '__file__'):
1804 1810 hgmodules = os.path.dirname(pycompat.fsencode(__file__))
1805 1811 elif getattr(sys, 'oxidized', False):
1806 1812 hgmodules = pycompat.sysexecutable
1807 1813
1808 1814 fm.write(
1809 1815 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1810 1816 )
1811 1817 fm.write(
1812 1818 b'hgmodules',
1813 1819 _(b"checking installed modules (%s)...\n"),
1814 1820 hgmodules or _(b"unknown"),
1815 1821 )
1816 1822
1817 1823 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1818 1824 rustext = rustandc # for now, that's the only case
1819 1825 cext = policy.policy in (b'c', b'allow') or rustandc
1820 1826 nopure = cext or rustext
1821 1827 if nopure:
1822 1828 err = None
1823 1829 try:
1824 1830 if cext:
1825 1831 from .cext import ( # pytype: disable=import-error
1826 1832 base85,
1827 1833 bdiff,
1828 1834 mpatch,
1829 1835 osutil,
1830 1836 )
1831 1837
1832 1838 # quiet pyflakes
1833 1839 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1834 1840 if rustext:
1835 1841 from .rustext import ( # pytype: disable=import-error
1836 1842 ancestor,
1837 1843 dirstate,
1838 1844 )
1839 1845
1840 1846 dir(ancestor), dir(dirstate) # quiet pyflakes
1841 1847 except Exception as inst:
1842 1848 err = stringutil.forcebytestr(inst)
1843 1849 problems += 1
1844 1850 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1845 1851
1846 1852 compengines = util.compengines._engines.values()
1847 1853 fm.write(
1848 1854 b'compengines',
1849 1855 _(b'checking registered compression engines (%s)\n'),
1850 1856 fm.formatlist(
1851 1857 sorted(e.name() for e in compengines),
1852 1858 name=b'compengine',
1853 1859 fmt=b'%s',
1854 1860 sep=b', ',
1855 1861 ),
1856 1862 )
1857 1863 fm.write(
1858 1864 b'compenginesavail',
1859 1865 _(b'checking available compression engines (%s)\n'),
1860 1866 fm.formatlist(
1861 1867 sorted(e.name() for e in compengines if e.available()),
1862 1868 name=b'compengine',
1863 1869 fmt=b'%s',
1864 1870 sep=b', ',
1865 1871 ),
1866 1872 )
1867 1873 wirecompengines = compression.compengines.supportedwireengines(
1868 1874 compression.SERVERROLE
1869 1875 )
1870 1876 fm.write(
1871 1877 b'compenginesserver',
1872 1878 _(
1873 1879 b'checking available compression engines '
1874 1880 b'for wire protocol (%s)\n'
1875 1881 ),
1876 1882 fm.formatlist(
1877 1883 [e.name() for e in wirecompengines if e.wireprotosupport()],
1878 1884 name=b'compengine',
1879 1885 fmt=b'%s',
1880 1886 sep=b', ',
1881 1887 ),
1882 1888 )
1883 1889 re2 = b'missing'
1884 1890 if util._re2:
1885 1891 re2 = b'available'
1886 1892 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1887 1893 fm.data(re2=bool(util._re2))
1888 1894
1889 1895 # templates
1890 1896 p = templater.templatedir()
1891 1897 fm.write(b'templatedirs', b'checking templates (%s)...\n', p or b'')
1892 1898 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1893 1899 if p:
1894 1900 (m, fp) = templater.try_open_template(b"map-cmdline.default")
1895 1901 if m:
1896 1902 # template found, check if it is working
1897 1903 err = None
1898 1904 try:
1899 1905 templater.templater.frommapfile(m)
1900 1906 except Exception as inst:
1901 1907 err = stringutil.forcebytestr(inst)
1902 1908 p = None
1903 1909 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1904 1910 else:
1905 1911 p = None
1906 1912 fm.condwrite(
1907 1913 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1908 1914 )
1909 1915 fm.condwrite(
1910 1916 not m,
1911 1917 b'defaulttemplatenotfound',
1912 1918 _(b" template '%s' not found\n"),
1913 1919 b"default",
1914 1920 )
1915 1921 if not p:
1916 1922 problems += 1
1917 1923 fm.condwrite(
1918 1924 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1919 1925 )
1920 1926
1921 1927 # editor
1922 1928 editor = ui.geteditor()
1923 1929 editor = util.expandpath(editor)
1924 1930 editorbin = procutil.shellsplit(editor)[0]
1925 1931 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1926 1932 cmdpath = procutil.findexe(editorbin)
1927 1933 fm.condwrite(
1928 1934 not cmdpath and editor == b'vi',
1929 1935 b'vinotfound',
1930 1936 _(
1931 1937 b" No commit editor set and can't find %s in PATH\n"
1932 1938 b" (specify a commit editor in your configuration"
1933 1939 b" file)\n"
1934 1940 ),
1935 1941 not cmdpath and editor == b'vi' and editorbin,
1936 1942 )
1937 1943 fm.condwrite(
1938 1944 not cmdpath and editor != b'vi',
1939 1945 b'editornotfound',
1940 1946 _(
1941 1947 b" Can't find editor '%s' in PATH\n"
1942 1948 b" (specify a commit editor in your configuration"
1943 1949 b" file)\n"
1944 1950 ),
1945 1951 not cmdpath and editorbin,
1946 1952 )
1947 1953 if not cmdpath and editor != b'vi':
1948 1954 problems += 1
1949 1955
1950 1956 # check username
1951 1957 username = None
1952 1958 err = None
1953 1959 try:
1954 1960 username = ui.username()
1955 1961 except error.Abort as e:
1956 1962 err = e.message
1957 1963 problems += 1
1958 1964
1959 1965 fm.condwrite(
1960 1966 username, b'username', _(b"checking username (%s)\n"), username
1961 1967 )
1962 1968 fm.condwrite(
1963 1969 err,
1964 1970 b'usernameerror',
1965 1971 _(
1966 1972 b"checking username...\n %s\n"
1967 1973 b" (specify a username in your configuration file)\n"
1968 1974 ),
1969 1975 err,
1970 1976 )
1971 1977
1972 1978 for name, mod in extensions.extensions():
1973 1979 handler = getattr(mod, 'debuginstall', None)
1974 1980 if handler is not None:
1975 1981 problems += handler(ui, fm)
1976 1982
1977 1983 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1978 1984 if not problems:
1979 1985 fm.data(problems=problems)
1980 1986 fm.condwrite(
1981 1987 problems,
1982 1988 b'problems',
1983 1989 _(b"%d problems detected, please check your install!\n"),
1984 1990 problems,
1985 1991 )
1986 1992 fm.end()
1987 1993
1988 1994 return problems
1989 1995
1990 1996
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    # Ask the peer about the binary form of every requested node id.
    known = peer.known([bin(nodeid) for nodeid in ids])
    bits = [b"1" if flag else b"0" for flag in known]
    ui.write(b"%s\n" % b"".join(bits))
2004 2010
2005 2011
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only as an alias: forward everything to debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
2010 2016
2011 2017
@command(
    b'debuglocks',
    [
        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-free-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forcibly freeing a lock is just removing its file; no other on-disk
    # bookkeeping exists for a lock.
    if opts.get('force_free_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_free_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
        return 0

    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            # Hold the acquired lock(s) until the user answers the prompt
            # (or the command is interrupted); the finally clause below
            # releases them in every case.
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Reporting mode: inspect both locks and describe their holders.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We managed to take the lock, so nobody held it; drop it again.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the failed
                # acquisition and the stat: treat it as free.
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
2123 2129
2124 2130
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache lives on the root manifest storage; not every
        # storage implementation provides one, hence the AttributeError path.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(
                        bytes(e), hint=b"Check your manifest node id"
                    )
                manifest.read()  # stores revision in cache too
        return

    # No action requested: display the current cache content.
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # Use cache.get to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
2198 2204
2199 2205
@command(b'debugmergestate', [] + cmdutil.templateopts, b'')
def debugmergestate(ui, repo, *args, **opts):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    if ui.verbose:
        ms = mergestatemod.mergestate(repo)

        # sort so that reasonable information is on top
        v1records = ms._readrecordsv1()
        v2records = ms._readrecordsv2()

        if not v1records and not v2records:
            pass
        elif not v2records:
            ui.writenoi18n(b'no version 2 merge state\n')
        elif ms._v1v2match(v1records, v2records):
            ui.writenoi18n(b'v1 and v2 states match: using v2\n')
        else:
            ui.writenoi18n(b'v1 and v2 states mismatch: using v1\n')

    opts = pycompat.byteskwargs(opts)
    if not opts[b'template']:
        # Default human-readable template used when --template is not given.
        opts[b'template'] = (
            b'{if(commits, "", "no merge state found\n")}'
            b'{commits % "{name}{if(label, " ({label})")}: {node}\n"}'
            b'{files % "file: {path} (state \\"{state}\\")\n'
            b'{if(local_path, "'
            b'  local path: {local_path} (hash {local_key}, flags \\"{local_flags}\\")\n'
            b'  ancestor path: {ancestor_path} (node {ancestor_node})\n'
            b'  other path: {other_path} (node {other_node})\n'
            b'")}'
            b'{if(rename_side, "'
            b'  rename side: {rename_side}\n'
            b'  renamed path: {renamed_path}\n'
            b'")}'
            b'{extras % "  extra: {key} = {value}\n"}'
            b'"}'
            b'{extras % "extra: {file} ({key} = {value})\n"}'
        )

    ms = mergestatemod.mergestate.read(repo)

    fm = ui.formatter(b'debugmergestate', opts)
    fm.startitem()

    # Nested 'commits' section: the local/other heads of the merge.
    fm_commits = fm.nested(b'commits')
    if ms.active():
        for name, node, label_index in (
            (b'local', ms.local, 0),
            (b'other', ms.other, 1),
        ):
            fm_commits.startitem()
            fm_commits.data(name=name)
            fm_commits.data(node=hex(node))
            if ms._labels and len(ms._labels) > label_index:
                fm_commits.data(label=ms._labels[label_index])
    fm_commits.end()

    # Nested 'files' section: one item per tracked merge record.
    fm_files = fm.nested(b'files')
    if ms.active():
        for f in ms:
            fm_files.startitem()
            fm_files.data(path=f)
            state = ms._state[f]
            fm_files.data(state=state[0])
            if state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED,
                mergestatemod.MERGE_RECORD_RESOLVED,
            ):
                fm_files.data(local_key=state[1])
                fm_files.data(local_path=state[2])
                fm_files.data(ancestor_path=state[3])
                fm_files.data(ancestor_node=state[4])
                fm_files.data(other_path=state[5])
                fm_files.data(other_node=state[6])
                fm_files.data(local_flags=state[7])
            elif state[0] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                fm_files.data(renamed_path=state[1])
                fm_files.data(rename_side=state[2])
            fm_extras = fm_files.nested(b'extras')
            for k, v in sorted(ms.extras(f).items()):
                fm_extras.startitem()
                fm_extras.data(key=k)
                fm_extras.data(value=v)
            fm_extras.end()

    fm_files.end()

    # Top-level 'extras' section: extras for files not in the mergestate.
    fm_extras = fm.nested(b'extras')
    for f, d in sorted(pycompat.iteritems(ms.allextras())):
        if f in ms:
            # If file is in mergestate, we have already processed its extras
            continue
        for k, v in pycompat.iteritems(d):
            fm_extras.startitem()
            fm_extras.data(file=f)
            fm_extras.data(key=k)
            fm_extras.data(value=v)
    fm_extras.end()

    fm.end()
2307 2313
2308 2314
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Gather names from every namespace except 'branches'; branch names are
    # collected separately below so that only open branches are offered.
    candidates = set()
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname == b'branches':
            continue
        candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    prefixes = args or [b'']
    matches = set()
    for prefix in prefixes:
        for candidate in candidates:
            if candidate.startswith(prefix):
                matches.add(candidate)
    ui.write(b'\n'.join(sorted(matches)))
    ui.write(b'\n')
2331 2337
2332 2338
@command(
    b'debugnodemap',
    [
        (
            b'',
            b'dump-new',
            False,
            _(b'write a (new) persistent binary nodemap on stdout'),
        ),
        (b'', b'dump-disk', False, _(b'dump on-disk data on stdout')),
        (
            b'',
            b'check',
            False,
            _(b'check that the data on disk data are correct.'),
        ),
        (
            b'',
            b'metadata',
            False,
            _(b'display the on disk meta data for the nodemap'),
        ),
    ],
)
def debugnodemap(ui, repo, **opts):
    """write and inspect on disk nodemap"""
    if opts['dump_new']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        # Prefer the index's own serialization when the backing
        # implementation provides one; otherwise fall back to the
        # generic nodemap helper.
        if util.safehasattr(cl.index, "nodemap_data_all"):
            data = cl.index.nodemap_data_all()
        else:
            data = nodemap.persistent_data(cl.index)
        ui.write(data)
    elif opts['dump_disk']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write(data[:])
    elif opts['check']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            return nodemap.check_data(ui, cl.index, data)
    elif opts['metadata']:
        unfi = repo.unfiltered()
        cl = unfi.changelog
        nm_data = nodemap.persisted_data(cl)
        if nm_data is not None:
            docket, data = nm_data
            ui.write((b"uid: %s\n") % docket.uid)
            ui.write((b"tip-rev: %d\n") % docket.tip_rev)
            ui.write((b"tip-node: %s\n") % hex(docket.tip_node))
            ui.write((b"data-length: %d\n") % docket.data_length)
            ui.write((b"data-unused: %d\n") % docket.data_unused)
            unused_perc = docket.data_unused * 100.0 / docket.data_length
            ui.write((b"data-unused: %2.3f%%\n") % unused_perc)
2394 2400
2395 2401
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a raw hex node id into binary, validating its length.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != repo.nodeconstants.nodelen:
                raise TypeError()
            return n
        except TypeError:
            raise error.InputError(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # Deletion mode: remove markers identified by their integer indices.
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.InputError(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    # Creation mode: a precursor (and optional successors) was given.
    if precursor is not None:
        if opts[b'rev']:
            raise error.InputError(
                b'cannot select revision when creating marker'
            )
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % stringutil.forcebytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2545 2551
2546 2552
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # Resolve the revision to inspect (working context when --rev is absent).
    revctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copymap = revctx.p1copies()
    for destination, source in copymap.items():
        ui.write(b'%s -> %s\n' % (source, destination))
2559 2565
2560 2566
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""
    # BUG FIX: this function was previously named debugp1copies, which
    # shadowed the real debugp1copies defined just above in the module
    # namespace. The hg command registration (via @command) was unaffected,
    # but any Python-level reference to debugcommands.debugp1copies resolved
    # to this p2 variant. Renaming restores both symbols.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2573 2579
2574 2580
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    """complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used."""

    def complete(path, acceptable):
        # Return (files, dirs): dirstate entries starting with 'path' whose
        # state letter is in 'acceptable'. Without --full, completion stops
        # at the next path separator and the prefix goes into 'dirs'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # dirstate paths always use '/'; translate OS separators if needed.
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st.state in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate state letters from the options;
    # with no filter options, accept all of normal/modified/added/removed.
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2643 2649
2644 2650
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # Resolve both endpoints, restrict the trace to matching files, then
    # print every copy in sorted (destination, source) order.
    fromctx = scmutil.revsingle(repo, rev1)
    toctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(fromctx, pats, opts)
    copymap = copies.pathcopies(fromctx, toctx, matcher)
    for destination, source in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (source, destination))
2658 2664
2659 2665
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    logoverride = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(logoverride):
        peer = hg.peer(ui, {}, path)

    try:
        islocal = peer.local() is not None
        pushable = peer.canpush()

        def yesno(flag):
            return _(b'yes') if flag else _(b'no')

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % yesno(islocal))
        ui.write(_(b'pushable: %s\n') % yesno(pushable))
    finally:
        peer.close()
2683 2689
2684 2690
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            # Suppress _picktool's chatter unless --debug was given, so that
            # the plain "FILE = MERGETOOL" lines stay readable.
            with ui.silent(
                error=True
            ) if not ui.debugflag else util.nullcontextmanager():
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            ui.write(b'%s = %s\n' % (path, tool))
2769 2775
2770 2776
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    """access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    """

    target = hg.peer(ui, {}, repopath)
    try:
        if keyinfo:
            # Set mode: conditional update of a single key.
            key, old, new = keyinfo
            with target.commandexecutor() as e:
                r = e.callcommand(
                    b'pushkey',
                    {
                        b'namespace': namespace,
                        b'key': key,
                        b'old': old,
                        b'new': new,
                    },
                ).result()

            ui.status(pycompat.bytestr(r) + b'\n')
            return not r
        else:
            # List mode: dump every key/value pair in the namespace.
            for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
                ui.write(
                    b"%s\t%s\n"
                    % (stringutil.escapestr(k), stringutil.escapestr(v))
                )
    finally:
        target.close()
2806 2812
2807 2813
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvec (parent vector) signatures of two revisions

    Prints both vectors, their depths, and the computed relation:
    '=' equal, '>'/'<' ancestry, '|' unrelated, '?' indeterminate.
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # BUG FIX: 'rel' used to be left unbound when none of the above
        # comparisons held, raising NameError in the write below; report
        # an explicit indeterminate relation instead.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2834 2840
2835 2841
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files only in the manifest or only in the dirstate are the
            # ones whose dirstate entries may be inconsistent.
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            # b'a' is the "added" state; pending adds are deliberately left
            # untouched so they survive the rebuild.
            dsnotadded = {f for f in dsonly if dirstate[f] != b'a'}
            changedfiles = manifestonly | dsnotadded

        # changedfiles=None means "rebuild everything".
        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2883 2889
2884 2890
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: all the work happens in repair.rebuildfncache().
    repair.rebuildfncache(ui, repo)
2889 2895
2890 2896
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        # renamed() yields (source path, source filenode) or a false value.
        copysource = fctx.filelog().renamed(fctx.filenode())
        display = repo.pathto(path)
        if not copysource:
            ui.write(_(b"%s not renamed\n") % display)
        else:
            srcpath, srcnode = copysource
            ui.write(
                _(b"%s renamed from %s:%s\n") % (display, srcpath, hex(srcnode))
            )
2910 2916
2911 2917
@command(b'debugrequires|debugrequirements', [], b'')
def debugrequirements(ui, repo):
    """print the current repo requirements"""
    # One requirement per line, sorted for stable output.
    for requirement in sorted(repo.requirements):
        ui.write(b"%s\n" % requirement)
2917 2923
2918 2924
@command(
    b'debugrevlog',
    cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)

    if opts.get(b"dump"):
        # --dump mode: emit one raw index line per revision and return
        # without computing the aggregate statistics below.
        numrevs = len(r)
        ui.write(
            (
                b"# rev p1rev p2rev start end deltastart base p1 p2"
                b" rawsize totalsize compression heads chainlen\n"
            )
        )
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": the revision is its own base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the set of current head revisions incrementally:
            # a revision displaces its parents as heads.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write(
                b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                b"%11d %5d %8d\n"
                % (
                    rev,
                    p1,
                    p2,
                    r.start(rev),
                    r.end(rev),
                    r.start(dbase),
                    r.start(cbase),
                    r.start(p1),
                    r.start(p2),
                    rs,
                    ts,
                    compression,
                    len(heads),
                    clen,
                )
            )
        return 0

    # Decode the revlog version and feature flags for display.
    format = r._format_version
    v = r._format_flags
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append(b'inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append(b'generaldelta')
    if not flags:
        flags = [b'(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each stats list is [min, max, total].
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into the [min, max, total] accumulator `l`.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    # Single pass over all revisions, classifying each one and
    # accumulating chain/size/chunk statistics.
    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # No delta parent: stored as a full snapshot (depth 0).
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Stored as a delta: extend the parent's chain data.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, b'_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the chunk identifies its compression type.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = b'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Turn the running totals into the averages printed below (the [2]
    # slot changes meaning from "total" to "average" here).
    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    if numfull == 0:
        fullsize[2] = 0
    else:
        fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Width-adaptive format strings so all numbers line up in columns.
    basedfmtstr = b'%%%dd\n'
    basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Decimal format sized to fit the widest expected value.
        return basedfmtstr % len(str(max))

    def pcfmtstr(max, padding=0):
        # Decimal-plus-percentage format, optionally padded.
        return basepcfmtstr % (len(str(max)), b' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total); 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.writenoi18n(b'format : %d\n' % format)
    ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))

    ui.write(b'\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
    ui.writenoi18n(
        b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
    )
    ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
    ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
    ui.writenoi18n(
        b' text : '
        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' delta : '
        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
    )
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(numsnapdepth[depth], numrevs)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
    ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
    ui.writenoi18n(
        b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
    )
    for depth in sorted(numsnapdepth):
        ui.write(
            (b' lvl-%-3d : ' % depth)
            + fmt % pcfmt(snaptotal[depth], totalsize)
        )
    ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render a chunk-type label: printable letters are shown both in
        # hex and literally, anything else in hex only.
        if chunktype == b'empty':
            return b' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return b' 0x%s : ' % hex(chunktype)

    ui.write(b'\n')
    ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write(b'\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
    ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
    ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
    ui.writenoi18n(b'compression ratio : ' + fmt % compratio)

    if format > 0:
        # v0 revlogs have no rawsize data, so these stats only exist for
        # format > 0 (see the `if format > 0` guard in the main loop).
        ui.write(b'\n')
        ui.writenoi18n(
            b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
            % tuple(datasize)
        )
        ui.writenoi18n(
            b'full revision size (min/max/avg) : %d / %d / %d\n'
            % tuple(fullsize)
        )
        ui.writenoi18n(
            b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
            % tuple(semisize)
        )
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.writenoi18n(
                b' level-%-3d (min/max/avg) : %d / %d / %d\n'
                % ((depth,) + tuple(snapsizedepth[depth]))
            )
        ui.writenoi18n(
            b'delta size (min/max/avg) : %d / %d / %d\n'
            % tuple(deltasize)
        )

    if numdeltas > 0:
        ui.write(b'\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.writenoi18n(
            b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
        )
        if numprev > 0:
            ui.writenoi18n(
                b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
            )
            ui.writenoi18n(
                b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
            )
            ui.writenoi18n(
                b' other : ' + fmt2 % pcfmt(numoprev, numprev)
            )
        if gdelta:
            # Parent deltas are only possible with generaldelta enabled.
            ui.writenoi18n(
                b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
            )
            ui.writenoi18n(
                b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
            )
3273 3279
3274 3280
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full-length node hashes with --debug, short hashes otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Measure one rendered node to size the header columns, then stop.
        idlen = len(shortfn(r.node(i)))
        break

    # Print the header line matching the chosen format and verbosity.
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b" rev offset length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b" rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b" rev flag offset length size link p1"
                    b" p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b" rev flag size link p1 p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One row per revision; format 0 shows parent nodeids, format 1 shows
    # parent revision numbers and flags.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the lookup fails.
                pp = [repo.nullid, repo.nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3388 3394
3389 3395
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The revset compilation pipeline: each stage transforms the tree
    # produced by the previous one. Stage names are user-visible via
    # --show-stage.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        # Drop the final 'optimized' stage.
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = {n for n, f in stages}

    # Decide which stages get their tree printed: always, or only when the
    # tree changed compared to the last one printed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, keeping every intermediate tree for possible
    # --verify-optimized comparison below.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both the analyzed and the optimized trees, then diff the
        # resulting revision lists; any difference means the optimizer
        # changed semantics.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render the mismatch as a unified-style +/- diff of revisions.
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3521 3527
3522 3528
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    # Optional file handle to which server I/O is logged; stays None when
    # neither --logiofd nor --logiofile was given.
    logfh = None

    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    # Blocks serving the SSH wire protocol until the peer disconnects.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3571 3577
3572 3578
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory (DANGEROUS)

    This command is not what you are looking for and should not be used. Using
    this command will most certainly result in slight corruption of the file
    level histories within your repository. DO NOT USE THIS COMMAND.

    The command updates the p1 and p2 fields in the dirstate, without touching
    anything else. This is useful for writing repository conversion tools, but
    should be used with extreme care. For example, neither the working
    directory nor the dirstate is updated, so file status may be incorrect
    after running this command. Only use this if you are one of the few people
    that deeply understand both conversion tools and file level histories. If
    you are reading this help, you are not one of these people (most of them
    sailed west from Mithlond anyway).

    So one last time DO NOT USE THIS COMMAND.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # rev2 defaults to the null revision, i.e. "no second parent".
    node2 = scmutil.revsingle(repo, rev2, b'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
3600 3606
3601 3607
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With --changelog/--manifest/--dir there is no FILE argument, so the
    # first positional argument is actually the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            # Report errors under this command's own name (previously the
            # copy-pasted b'debugdata' produced misleading usage errors).
            raise error.CommandError(
                b'debugsidedata', _(b'invalid arguments')
            )
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugsidedata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugsidedata', file_, opts)
    # Some storage objects wrap the actual revlog; unwrap it so we can call
    # sidedata() directly.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Sort entries by key for stable, comparable output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3628 3634
3629 3635
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    """test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    """
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = urlutil.get_unique_pull_path(
        b'debugssl', repo, ui, source
    )
    url = urlutil.url(source)

    # Only https and ssh URLs make sense here; fill in the scheme's
    # well-known port when the URL does not specify one.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Certificate verification is disabled (CERT_NONE) because we only need
    # the peer's raw certificate, not a validated connection.
    # NOTE(review): ssl.wrap_socket() is deprecated since Python 3.7 and
    # removed in 3.12 — migrate to SSLContext.wrap_socket() eventually.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        # DER-encoded peer certificate (binary form, hence True).
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First probe without building; only trigger the (slow) Windows
        # Update chain build when the chain is incomplete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3701 3707
3702 3708
@command(
    b"debugbackupbundle",
    [
        (
            b"",
            b"recover",
            b"",
            b"brings the specified changeset back into the repository",
        )
    ]
    + cmdutil.logopts,
    _(b"hg debugbackupbundle [--recover HASH]"),
)
def debugbackupbundle(ui, repo, *pats, **opts):
    """lists the changesets available in backup bundles

    Without any arguments, this command prints a list of the changesets in each
    backup bundle.

    --recover takes a changeset hash and unbundles the first bundle that
    contains that hash, which puts that changeset back in your repository.

    --verbose will print the entire commit message and the bundle path for that
    backup.
    """
    # Collect every *.hg bundle file under .hg/strip-backup, newest first.
    backups = list(
        filter(
            os.path.isfile, glob.glob(repo.vfs.join(b"strip-backup") + b"/*.hg")
        )
    )
    backups.sort(key=lambda x: os.path.getmtime(x), reverse=True)

    opts = pycompat.byteskwargs(opts)
    # Neutralize bundle/force so getremotechanges() below behaves like a
    # plain incoming probe.
    opts[b"bundle"] = b""
    opts[b"force"] = None
    limit = logcmdutil.getlimit(opts)

    def display(other, chlist, displayer):
        # Show up to ``limit`` changesets from ``chlist``, honoring the
        # --newest-first and --no-merges log options.
        if opts.get(b"newest_first"):
            chlist.reverse()
        count = 0
        for n in chlist:
            if limit is not None and count >= limit:
                break
            parents = [
                True for p in other.changelog.parents(n) if p != repo.nullid
            ]
            if opts.get(b"no_merges") and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])

    recovernode = opts.get(b"recover")
    if recovernode:
        # Nothing to do if the requested changeset is already present.
        if scmutil.isrevsymbol(repo, recovernode):
            ui.warn(_(b"%s already exists in the repo\n") % recovernode)
            return
    elif backups:
        msg = _(
            b"Recover changesets using: hg debugbackupbundle --recover "
            b"<changeset hash>\n\nAvailable backup changesets:"
        )
        ui.status(msg, label=b"status.removed")
    else:
        ui.status(_(b"no backup changesets found\n"))
        return

    for backup in backups:
        # Much of this is copied from the hg incoming logic
        source = os.path.relpath(backup, encoding.getcwd())
        source, branches = urlutil.get_unique_pull_path(
            b'debugbackupbundle',
            repo,
            ui,
            source,
            default_branches=opts.get(b'branch'),
        )
        try:
            other = hg.peer(repo, opts, source)
        except error.LookupError as ex:
            # The bundle references a parent revision we do not have;
            # warn and move on to the next backup.
            msg = _(b"\nwarning: unable to open bundle %s") % source
            hint = _(b"\n(missing parent rev %s)\n") % short(ex.name)
            ui.warn(msg, hint=hint)
            continue
        revs, checkout = hg.addbranchrevs(
            repo, other, branches, opts.get(b"rev")
        )

        if revs:
            revs = [other.lookup(rev) for rev in revs]

        # Silence the incoming machinery while probing the bundle contents.
        with ui.silent():
            try:
                other, chlist, cleanupfn = bundlerepo.getremotechanges(
                    ui, repo, other, revs, opts[b"bundle"], opts[b"force"]
                )
            except error.LookupError:
                continue

        try:
            if not chlist:
                continue
            if recovernode:
                with repo.lock(), repo.transaction(b"unbundle") as tr:
                    if scmutil.isrevsymbol(other, recovernode):
                        ui.status(_(b"Unbundling %s\n") % (recovernode))
                        f = hg.openpath(ui, source)
                        gen = exchange.readbundle(ui, f, source)
                        if isinstance(gen, bundle2.unbundle20):
                            bundle2.applybundle(
                                repo,
                                gen,
                                tr,
                                source=b"unbundle",
                                url=b"bundle:" + source,
                            )
                        else:
                            gen.apply(repo, b"unbundle", b"bundle:" + source)
                        # The first bundle containing the node wins; stop.
                        break
            else:
                backupdate = encoding.strtolocal(
                    time.strftime(
                        "%a %H:%M, %Y-%m-%d",
                        time.localtime(os.path.getmtime(source)),
                    )
                )
                ui.status(b"\n%s\n" % (backupdate.ljust(50)))
                if ui.verbose:
                    ui.status(b"%s%s\n" % (b"bundle:".ljust(13), source))
                else:
                    # One-line-per-changeset template for terse output.
                    opts[
                        b"template"
                    ] = b"{label('status.modified', node|short)} {desc|firstline}\n"
                displayer = logcmdutil.changesetdisplayer(
                    ui, other, opts, False
                )
                display(other, chlist, displayer)
                displayer.close()
        finally:
            cleanupfn()
3843 3849
3844 3850
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the substate of every subrepository recorded at the given
    # revision: path, source URL, and pinned revision.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3856 3862
3857 3863
@command(b'debugshell', optionalrepo=True)
def debugshell(ui, repo):
    """run an interactive Python interpreter

    The local namespace is provided with a reference to the ui and
    the repo instance (if available).
    """
    import code

    # Seed the REPL namespace with the ui and (possibly None) repo objects.
    code.interact(local={'ui': ui, 'repo': repo})
3873 3879
3874 3880
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # The cache is shared across successorssets() invocations so repeated
    # computations are amortized over all requested revisions.
    cache = {}
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % bytes(ctx))
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            # One indented line per successors set; empty sets print as a
            # bare newline.
            if succsset:
                ui.write(b'    ')
                ui.write(b' '.join(short(node) for node in succsset))
            ui.write(b'\n')
3929 3935
3930 3936
@command(b'debugtagscache', [])
def debugtagscache(ui, repo):
    """display the contents of .hg/cache/hgtagsfnodes1"""
    cache = tagsmod.hgtagsfnodescache(repo.unfiltered())
    flog = repo.file(b'.hgtags')
    for rev in repo:
        node = repo[rev].node()
        tagsnode = cache.getfnode(node, computemissing=False)
        # Three cases: a cached fnode (possibly unknown to the filelog),
        # no cache entry (None), or a falsy non-None marker for a bad entry.
        if tagsnode:
            shown = hex(tagsnode)
            if not flog.hasnode(tagsnode):
                shown += b' (unknown node)'
        elif tagsnode is None:
            shown = b'missing'
        else:
            shown = b'invalid'
        ui.write(b'%d %s %s\n' % (rev, hex(node), shown))
3949 3955
3950 3956
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev needs a repository even though the command is optionalrepo.
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into template properties; an empty key
    # or the key b'ui' is rejected.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Print the parsed tree, and the alias-expanded tree when alias
        # expansion actually changed it.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
4014 4020
4015 4021
@command(
    b'debuguigetpass',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    response = ui.getpass(prompt)
    # Substitute a visible marker when no password came back.
    if response is None:
        response = b"<default response>"
    ui.writenoi18n(b'response: %s\n' % response)
4030 4036
4031 4037
@command(
    b'debuguiprompt',
    [
        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
    ],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo back whatever the prompt returned.
    response = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % response)
4044 4050
4045 4051
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy and store locks while refreshing caches.
    with repo.wlock(), repo.lock():
        repo.updatecaches(caches=repository.CACHES_ALL)
4051 4057
4052 4058
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlogs will be upgraded. You can restrict this using flags
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    * `--filelogs`: optimize the filelogs only
    * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
    """
    # Delegate to the upgrade module; set() de-duplicates repeated -o names.
    return upgrade.upgraderepo(
        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
    )
4102 4108
4103 4109
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # Decide once whether relative paths should be slash-normalized.
    normalize = util.normpath
    if not (ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'):

        def normalize(fn):
            return fn

    # Column widths: widest repo-absolute path and widest relative path.
    abswidth = max(len(name) for name in items)
    relwidth = max(len(repo.pathto(name)) for name in items)
    fmt = b'f %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for name in items:
        line = fmt % (
            name,
            normalize(repo.pathto(name)),
            b'exact' if m.exact(name) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
4130 4136
4131 4137
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Render the divergent nodes, if any, as "hex (phase)" pairs
        # followed by a trailing space.
        if entry.get(b'divergentnodes'):
            pairs = [
                b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                for ctx in entry[b'divergentnodes']
            ]
            dnodes = b' '.join(pairs) + b' '
        else:
            dnodes = b''
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
4149 4155
4150 4156
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise the wire-protocol 'debugwireargs' command against a peer.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    try:
        # Drop the generic remote options; they are not wire arguments.
        for opt in cmdutil.remoteopts:
            del opts[opt[1]]
        # Forward only the options that were actually set.
        args = pycompat.strkwargs(
            {k: v for k, v in pycompat.iteritems(opts) if v}
        )
        # run twice to check that we don't mess up the stream for the next command
        res1 = repo.debugwireargs(*vals, **args)
        res2 = repo.debugwireargs(*vals, **args)
        ui.write(b"%s\n" % res1)
        if res1 != res2:
            ui.warn(b"%s\n" % res2)
    finally:
        repo.close()
4181 4187
4182 4188
4183 4189 def _parsewirelangblocks(fh):
4184 4190 activeaction = None
4185 4191 blocklines = []
4186 4192 lastindent = 0
4187 4193
4188 4194 for line in fh:
4189 4195 line = line.rstrip()
4190 4196 if not line:
4191 4197 continue
4192 4198
4193 4199 if line.startswith(b'#'):
4194 4200 continue
4195 4201
4196 4202 if not line.startswith(b' '):
4197 4203 # New block. Flush previous one.
4198 4204 if activeaction:
4199 4205 yield activeaction, blocklines
4200 4206
4201 4207 activeaction = line
4202 4208 blocklines = []
4203 4209 lastindent = 0
4204 4210 continue
4205 4211
4206 4212 # Else we start with an indent.
4207 4213
4208 4214 if not activeaction:
4209 4215 raise error.Abort(_(b'indented line outside of block'))
4210 4216
4211 4217 indent = len(line) - len(line.lstrip())
4212 4218
4213 4219 # If this line is indented more than the last line, concatenate it.
4214 4220 if indent > lastindent and blocklines:
4215 4221 blocklines[-1] += line.lstrip()
4216 4222 else:
4217 4223 blocklines.append(line)
4218 4224 lastindent = indent
4219 4225
4220 4226 # Flush last block.
4221 4227 if activeaction:
4222 4228 yield activeaction, blocklines
4223 4229
4224 4230
4225 4231 @command(
4226 4232 b'debugwireproto',
4227 4233 [
4228 4234 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
4229 4235 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
4230 4236 (
4231 4237 b'',
4232 4238 b'noreadstderr',
4233 4239 False,
4234 4240 _(b'do not read from stderr of the remote'),
4235 4241 ),
4236 4242 (
4237 4243 b'',
4238 4244 b'nologhandshake',
4239 4245 False,
4240 4246 _(b'do not log I/O related to the peer handshake'),
4241 4247 ),
4242 4248 ]
4243 4249 + cmdutil.remoteopts,
4244 4250 _(b'[PATH]'),
4245 4251 optionalrepo=True,
4246 4252 )
4247 4253 def debugwireproto(ui, repo, path=None, **opts):
4248 4254 """send wire protocol commands to a server
4249 4255
4250 4256 This command can be used to issue wire protocol commands to remote
4251 4257 peers and to debug the raw data being exchanged.
4252 4258
4253 4259 ``--localssh`` will start an SSH server against the current repository
4254 4260 and connect to that. By default, the connection will perform a handshake
4255 4261 and establish an appropriate peer instance.
4256 4262
4257 4263 ``--peer`` can be used to bypass the handshake protocol and construct a
4258 4264 peer instance using the specified class type. Valid values are ``raw``,
4259 4265 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
4260 4266 raw data payloads and don't support higher-level command actions.
4261 4267
4262 4268 ``--noreadstderr`` can be used to disable automatic reading from stderr
4263 4269 of the peer (for SSH connections only). Disabling automatic reading of
4264 4270 stderr is useful for making output more deterministic.
4265 4271
4266 4272 Commands are issued via a mini language which is specified via stdin.
4267 4273 The language consists of individual actions to perform. An action is
4268 4274 defined by a block. A block is defined as a line with no leading
4269 4275 space followed by 0 or more lines with leading space. Blocks are
4270 4276 effectively a high-level command with additional metadata.
4271 4277
4272 4278 Lines beginning with ``#`` are ignored.
4273 4279
4274 4280 The following sections denote available actions.
4275 4281
4276 4282 raw
4277 4283 ---
4278 4284
4279 4285 Send raw data to the server.
4280 4286
4281 4287 The block payload contains the raw data to send as one atomic send
4282 4288 operation. The data may not actually be delivered in a single system
4283 4289 call: it depends on the abilities of the transport being used.
4284 4290
4285 4291 Each line in the block is de-indented and concatenated. Then, that
4286 4292 value is evaluated as a Python b'' literal. This allows the use of
4287 4293 backslash escaping, etc.
4288 4294
4289 4295 raw+
4290 4296 ----
4291 4297
4292 4298 Behaves like ``raw`` except flushes output afterwards.
4293 4299
4294 4300 command <X>
4295 4301 -----------
4296 4302
4297 4303 Send a request to run a named command, whose name follows the ``command``
4298 4304 string.
4299 4305
4300 4306 Arguments to the command are defined as lines in this block. The format of
4301 4307 each line is ``<key> <value>``. e.g.::
4302 4308
4303 4309 command listkeys
4304 4310 namespace bookmarks
4305 4311
4306 4312 If the value begins with ``eval:``, it will be interpreted as a Python
4307 4313 literal expression. Otherwise values are interpreted as Python b'' literals.
4308 4314 This allows sending complex types and encoding special byte sequences via
4309 4315 backslash escaping.
4310 4316
4311 4317 The following arguments have special meaning:
4312 4318
4313 4319 ``PUSHFILE``
4314 4320 When defined, the *push* mechanism of the peer will be used instead
4315 4321 of the static request-response mechanism and the content of the
4316 4322 file specified in the value of this argument will be sent as the
4317 4323 command payload.
4318 4324
4319 4325 This can be used to submit a local bundle file to the remote.
4320 4326
4321 4327 batchbegin
4322 4328 ----------
4323 4329
4324 4330 Instruct the peer to begin a batched send.
4325 4331
4326 4332 All ``command`` blocks are queued for execution until the next
4327 4333 ``batchsubmit`` block.
4328 4334
4329 4335 batchsubmit
4330 4336 -----------
4331 4337
4332 4338 Submit previously queued ``command`` blocks as a batch request.
4333 4339
4334 4340 This action MUST be paired with a ``batchbegin`` action.
4335 4341
4336 4342 httprequest <method> <path>
4337 4343 ---------------------------
4338 4344
4339 4345 (HTTP peer only)
4340 4346
4341 4347 Send an HTTP request to the peer.
4342 4348
4343 4349 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
4344 4350
4345 4351 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
4346 4352 headers to add to the request. e.g. ``Accept: foo``.
4347 4353
4348 4354 The following arguments are special:
4349 4355
4350 4356 ``BODYFILE``
4351 4357 The content of the file defined as the value to this argument will be
4352 4358 transferred verbatim as the HTTP request body.
4353 4359
4354 4360 ``frame <type> <flags> <payload>``
4355 4361 Send a unified protocol frame as part of the request body.
4356 4362
4357 4363 All frames will be collected and sent as the body to the HTTP
4358 4364 request.
4359 4365
4360 4366 close
4361 4367 -----
4362 4368
4363 4369 Close the connection to the server.
4364 4370
4365 4371 flush
4366 4372 -----
4367 4373
4368 4374 Flush data written to the server.
4369 4375
4370 4376 readavailable
4371 4377 -------------
4372 4378
4373 4379 Close the write end of the connection and read all available data from
4374 4380 the server.
4375 4381
4376 4382 If the connection to the server encompasses multiple pipes, we poll both
4377 4383 pipes and read available data.
4378 4384
4379 4385 readline
4380 4386 --------
4381 4387
4382 4388 Read a line of output from the server. If there are multiple output
4383 4389 pipes, reads only the main pipe.
4384 4390
4385 4391 ereadline
4386 4392 ---------
4387 4393
4388 4394 Like ``readline``, but read from the stderr pipe, if available.
4389 4395
4390 4396 read <X>
4391 4397 --------
4392 4398
4393 4399 ``read()`` N bytes from the server's main output pipe.
4394 4400
4395 4401 eread <X>
4396 4402 ---------
4397 4403
4398 4404 ``read()`` N bytes from the server's stderr pipe, if available.
4399 4405
4400 4406 Specifying Unified Frame-Based Protocol Frames
4401 4407 ----------------------------------------------
4402 4408
4403 4409 It is possible to emit a *Unified Frame-Based Protocol* by using special
4404 4410 syntax.
4405 4411
4406 4412 A frame is composed as a type, flags, and payload. These can be parsed
4407 4413 from a string of the form:
4408 4414
4409 4415 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
4410 4416
4411 4417 ``request-id`` and ``stream-id`` are integers defining the request and
4412 4418 stream identifiers.
4413 4419
4414 4420 ``type`` can be an integer value for the frame type or the string name
4415 4421 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
4416 4422 ``command-name``.
4417 4423
4418 4424 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
4419 4425 components. Each component (and there can be just one) can be an integer
4420 4426 or a flag name for stream flags or frame flags, respectively. Values are
4421 4427 resolved to integers and then bitwise OR'd together.
4422 4428
4423 4429 ``payload`` represents the raw frame payload. If it begins with
4424 4430 ``cbor:``, the following string is evaluated as Python code and the
4425 4431 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
4426 4432 as a Python byte string literal.
4427 4433 """
4428 4434 opts = pycompat.byteskwargs(opts)
4429 4435
4430 4436 if opts[b'localssh'] and not repo:
4431 4437 raise error.Abort(_(b'--localssh requires a repository'))
4432 4438
4433 4439 if opts[b'peer'] and opts[b'peer'] not in (
4434 4440 b'raw',
4435 4441 b'http2',
4436 4442 b'ssh1',
4437 4443 b'ssh2',
4438 4444 ):
4439 4445 raise error.Abort(
4440 4446 _(b'invalid value for --peer'),
4441 4447 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
4442 4448 )
4443 4449
4444 4450 if path and opts[b'localssh']:
4445 4451 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
4446 4452
4447 4453 if ui.interactive():
4448 4454 ui.write(_(b'(waiting for commands on stdin)\n'))
4449 4455
4450 4456 blocks = list(_parsewirelangblocks(ui.fin))
4451 4457
4452 4458 proc = None
4453 4459 stdin = None
4454 4460 stdout = None
4455 4461 stderr = None
4456 4462 opener = None
4457 4463
4458 4464 if opts[b'localssh']:
4459 4465 # We start the SSH server in its own process so there is process
4460 4466 # separation. This prevents a whole class of potential bugs around
4461 4467 # shared state from interfering with server operation.
4462 4468 args = procutil.hgcmd() + [
4463 4469 b'-R',
4464 4470 repo.root,
4465 4471 b'debugserve',
4466 4472 b'--sshstdio',
4467 4473 ]
4468 4474 proc = subprocess.Popen(
4469 4475 pycompat.rapply(procutil.tonativestr, args),
4470 4476 stdin=subprocess.PIPE,
4471 4477 stdout=subprocess.PIPE,
4472 4478 stderr=subprocess.PIPE,
4473 4479 bufsize=0,
4474 4480 )
4475 4481
4476 4482 stdin = proc.stdin
4477 4483 stdout = proc.stdout
4478 4484 stderr = proc.stderr
4479 4485
4480 4486 # We turn the pipes into observers so we can log I/O.
4481 4487 if ui.verbose or opts[b'peer'] == b'raw':
4482 4488 stdin = util.makeloggingfileobject(
4483 4489 ui, proc.stdin, b'i', logdata=True
4484 4490 )
4485 4491 stdout = util.makeloggingfileobject(
4486 4492 ui, proc.stdout, b'o', logdata=True
4487 4493 )
4488 4494 stderr = util.makeloggingfileobject(
4489 4495 ui, proc.stderr, b'e', logdata=True
4490 4496 )
4491 4497
4492 4498 # --localssh also implies the peer connection settings.
4493 4499
4494 4500 url = b'ssh://localserver'
4495 4501 autoreadstderr = not opts[b'noreadstderr']
4496 4502
4497 4503 if opts[b'peer'] == b'ssh1':
4498 4504 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
4499 4505 peer = sshpeer.sshv1peer(
4500 4506 ui,
4501 4507 url,
4502 4508 proc,
4503 4509 stdin,
4504 4510 stdout,
4505 4511 stderr,
4506 4512 None,
4507 4513 autoreadstderr=autoreadstderr,
4508 4514 )
4509 4515 elif opts[b'peer'] == b'ssh2':
4510 4516 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
4511 4517 peer = sshpeer.sshv2peer(
4512 4518 ui,
4513 4519 url,
4514 4520 proc,
4515 4521 stdin,
4516 4522 stdout,
4517 4523 stderr,
4518 4524 None,
4519 4525 autoreadstderr=autoreadstderr,
4520 4526 )
4521 4527 elif opts[b'peer'] == b'raw':
4522 4528 ui.write(_(b'using raw connection to peer\n'))
4523 4529 peer = None
4524 4530 else:
4525 4531 ui.write(_(b'creating ssh peer from handshake results\n'))
4526 4532 peer = sshpeer.makepeer(
4527 4533 ui,
4528 4534 url,
4529 4535 proc,
4530 4536 stdin,
4531 4537 stdout,
4532 4538 stderr,
4533 4539 autoreadstderr=autoreadstderr,
4534 4540 )
4535 4541
4536 4542 elif path:
4537 4543 # We bypass hg.peer() so we can proxy the sockets.
4538 4544 # TODO consider not doing this because we skip
4539 4545 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4540 4546 u = urlutil.url(path)
4541 4547 if u.scheme != b'http':
4542 4548 raise error.Abort(_(b'only http:// paths are currently supported'))
4543 4549
4544 4550 url, authinfo = u.authinfo()
4545 4551 openerargs = {
4546 4552 'useragent': b'Mercurial debugwireproto',
4547 4553 }
4548 4554
4549 4555 # Turn pipes/sockets into observers so we can log I/O.
4550 4556 if ui.verbose:
4551 4557 openerargs.update(
4552 4558 {
4553 4559 'loggingfh': ui,
4554 4560 'loggingname': b's',
4555 4561 'loggingopts': {
4556 4562 'logdata': True,
4557 4563 'logdataapis': False,
4558 4564 },
4559 4565 }
4560 4566 )
4561 4567
4562 4568 if ui.debugflag:
4563 4569 openerargs['loggingopts']['logdataapis'] = True
4564 4570
4565 4571 # Don't send default headers when in raw mode. This allows us to
4566 4572 # bypass most of the behavior of our URL handling code so we can
4567 4573 # have near complete control over what's sent on the wire.
4568 4574 if opts[b'peer'] == b'raw':
4569 4575 openerargs['sendaccept'] = False
4570 4576
4571 4577 opener = urlmod.opener(ui, authinfo, **openerargs)
4572 4578
4573 4579 if opts[b'peer'] == b'http2':
4574 4580 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4575 4581 # We go through makepeer() because we need an API descriptor for
4576 4582 # the peer instance to be useful.
4577 4583 maybe_silent = (
4578 4584 ui.silent()
4579 4585 if opts[b'nologhandshake']
4580 4586 else util.nullcontextmanager()
4581 4587 )
4582 4588 with maybe_silent, ui.configoverride(
4583 4589 {(b'experimental', b'httppeer.advertise-v2'): True}
4584 4590 ):
4585 4591 peer = httppeer.makepeer(ui, path, opener=opener)
4586 4592
4587 4593 if not isinstance(peer, httppeer.httpv2peer):
4588 4594 raise error.Abort(
4589 4595 _(
4590 4596 b'could not instantiate HTTP peer for '
4591 4597 b'wire protocol version 2'
4592 4598 ),
4593 4599 hint=_(
4594 4600 b'the server may not have the feature '
4595 4601 b'enabled or is not allowing this '
4596 4602 b'client version'
4597 4603 ),
4598 4604 )
4599 4605
4600 4606 elif opts[b'peer'] == b'raw':
4601 4607 ui.write(_(b'using raw connection to peer\n'))
4602 4608 peer = None
4603 4609 elif opts[b'peer']:
4604 4610 raise error.Abort(
4605 4611 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4606 4612 )
4607 4613 else:
4608 4614 peer = httppeer.makepeer(ui, path, opener=opener)
4609 4615
4610 4616 # We /could/ populate stdin/stdout with sock.makefile()...
4611 4617 else:
4612 4618 raise error.Abort(_(b'unsupported connection configuration'))
4613 4619
4614 4620 batchedcommands = None
4615 4621
4616 4622 # Now perform actions based on the parsed wire language instructions.
4617 4623 for action, lines in blocks:
4618 4624 if action in (b'raw', b'raw+'):
4619 4625 if not stdin:
4620 4626 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4621 4627
4622 4628 # Concatenate the data together.
4623 4629 data = b''.join(l.lstrip() for l in lines)
4624 4630 data = stringutil.unescapestr(data)
4625 4631 stdin.write(data)
4626 4632
4627 4633 if action == b'raw+':
4628 4634 stdin.flush()
4629 4635 elif action == b'flush':
4630 4636 if not stdin:
4631 4637 raise error.Abort(_(b'cannot call flush on this peer'))
4632 4638 stdin.flush()
4633 4639 elif action.startswith(b'command'):
4634 4640 if not peer:
4635 4641 raise error.Abort(
4636 4642 _(
4637 4643 b'cannot send commands unless peer instance '
4638 4644 b'is available'
4639 4645 )
4640 4646 )
4641 4647
4642 4648 command = action.split(b' ', 1)[1]
4643 4649
4644 4650 args = {}
4645 4651 for line in lines:
4646 4652 # We need to allow empty values.
4647 4653 fields = line.lstrip().split(b' ', 1)
4648 4654 if len(fields) == 1:
4649 4655 key = fields[0]
4650 4656 value = b''
4651 4657 else:
4652 4658 key, value = fields
4653 4659
4654 4660 if value.startswith(b'eval:'):
4655 4661 value = stringutil.evalpythonliteral(value[5:])
4656 4662 else:
4657 4663 value = stringutil.unescapestr(value)
4658 4664
4659 4665 args[key] = value
4660 4666
4661 4667 if batchedcommands is not None:
4662 4668 batchedcommands.append((command, args))
4663 4669 continue
4664 4670
4665 4671 ui.status(_(b'sending %s command\n') % command)
4666 4672
4667 4673 if b'PUSHFILE' in args:
4668 4674 with open(args[b'PUSHFILE'], 'rb') as fh:
4669 4675 del args[b'PUSHFILE']
4670 4676 res, output = peer._callpush(
4671 4677 command, fh, **pycompat.strkwargs(args)
4672 4678 )
4673 4679 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4674 4680 ui.status(
4675 4681 _(b'remote output: %s\n') % stringutil.escapestr(output)
4676 4682 )
4677 4683 else:
4678 4684 with peer.commandexecutor() as e:
4679 4685 res = e.callcommand(command, args).result()
4680 4686
4681 4687 if isinstance(res, wireprotov2peer.commandresponse):
4682 4688 val = res.objects()
4683 4689 ui.status(
4684 4690 _(b'response: %s\n')
4685 4691 % stringutil.pprint(val, bprefix=True, indent=2)
4686 4692 )
4687 4693 else:
4688 4694 ui.status(
4689 4695 _(b'response: %s\n')
4690 4696 % stringutil.pprint(res, bprefix=True, indent=2)
4691 4697 )
4692 4698
4693 4699 elif action == b'batchbegin':
4694 4700 if batchedcommands is not None:
4695 4701 raise error.Abort(_(b'nested batchbegin not allowed'))
4696 4702
4697 4703 batchedcommands = []
4698 4704 elif action == b'batchsubmit':
4699 4705 # There is a batching API we could go through. But it would be
4700 4706 # difficult to normalize requests into function calls. It is easier
4701 4707 # to bypass this layer and normalize to commands + args.
4702 4708 ui.status(
4703 4709 _(b'sending batch with %d sub-commands\n')
4704 4710 % len(batchedcommands)
4705 4711 )
4706 4712 assert peer is not None
4707 4713 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4708 4714 ui.status(
4709 4715 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4710 4716 )
4711 4717
4712 4718 batchedcommands = None
4713 4719
4714 4720 elif action.startswith(b'httprequest '):
4715 4721 if not opener:
4716 4722 raise error.Abort(
4717 4723 _(b'cannot use httprequest without an HTTP peer')
4718 4724 )
4719 4725
4720 4726 request = action.split(b' ', 2)
4721 4727 if len(request) != 3:
4722 4728 raise error.Abort(
4723 4729 _(
4724 4730 b'invalid httprequest: expected format is '
4725 4731 b'"httprequest <method> <path>'
4726 4732 )
4727 4733 )
4728 4734
4729 4735 method, httppath = request[1:]
4730 4736 headers = {}
4731 4737 body = None
4732 4738 frames = []
4733 4739 for line in lines:
4734 4740 line = line.lstrip()
4735 4741 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4736 4742 if m:
4737 4743 # Headers need to use native strings.
4738 4744 key = pycompat.strurl(m.group(1))
4739 4745 value = pycompat.strurl(m.group(2))
4740 4746 headers[key] = value
4741 4747 continue
4742 4748
4743 4749 if line.startswith(b'BODYFILE '):
4744 4750 with open(line.split(b' ', 1), b'rb') as fh:
4745 4751 body = fh.read()
4746 4752 elif line.startswith(b'frame '):
4747 4753 frame = wireprotoframing.makeframefromhumanstring(
4748 4754 line[len(b'frame ') :]
4749 4755 )
4750 4756
4751 4757 frames.append(frame)
4752 4758 else:
4753 4759 raise error.Abort(
4754 4760 _(b'unknown argument to httprequest: %s') % line
4755 4761 )
4756 4762
4757 4763 url = path + httppath
4758 4764
4759 4765 if frames:
4760 4766 body = b''.join(bytes(f) for f in frames)
4761 4767
4762 4768 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4763 4769
4764 4770 # urllib.Request insists on using has_data() as a proxy for
4765 4771 # determining the request method. Override that to use our
4766 4772 # explicitly requested method.
4767 4773 req.get_method = lambda: pycompat.sysstr(method)
4768 4774
4769 4775 try:
4770 4776 res = opener.open(req)
4771 4777 body = res.read()
4772 4778 except util.urlerr.urlerror as e:
4773 4779 # read() method must be called, but only exists in Python 2
4774 4780 getattr(e, 'read', lambda: None)()
4775 4781 continue
4776 4782
4777 4783 ct = res.headers.get('Content-Type')
4778 4784 if ct == 'application/mercurial-cbor':
4779 4785 ui.write(
4780 4786 _(b'cbor> %s\n')
4781 4787 % stringutil.pprint(
4782 4788 cborutil.decodeall(body), bprefix=True, indent=2
4783 4789 )
4784 4790 )
4785 4791
4786 4792 elif action == b'close':
4787 4793 assert peer is not None
4788 4794 peer.close()
4789 4795 elif action == b'readavailable':
4790 4796 if not stdout or not stderr:
4791 4797 raise error.Abort(
4792 4798 _(b'readavailable not available on this peer')
4793 4799 )
4794 4800
4795 4801 stdin.close()
4796 4802 stdout.read()
4797 4803 stderr.read()
4798 4804
4799 4805 elif action == b'readline':
4800 4806 if not stdout:
4801 4807 raise error.Abort(_(b'readline not available on this peer'))
4802 4808 stdout.readline()
4803 4809 elif action == b'ereadline':
4804 4810 if not stderr:
4805 4811 raise error.Abort(_(b'ereadline not available on this peer'))
4806 4812 stderr.readline()
4807 4813 elif action.startswith(b'read '):
4808 4814 count = int(action.split(b' ', 1)[1])
4809 4815 if not stdout:
4810 4816 raise error.Abort(_(b'read not available on this peer'))
4811 4817 stdout.read(count)
4812 4818 elif action.startswith(b'eread '):
4813 4819 count = int(action.split(b' ', 1)[1])
4814 4820 if not stderr:
4815 4821 raise error.Abort(_(b'eread not available on this peer'))
4816 4822 stderr.read(count)
4817 4823 else:
4818 4824 raise error.Abort(_(b'unknown action: %s') % action)
4819 4825
4820 4826 if batchedcommands is not None:
4821 4827 raise error.Abort(_(b'unclosed "batchbegin" request'))
4822 4828
4823 4829 if peer:
4824 4830 peer.close()
4825 4831
4826 4832 if proc:
4827 4833 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now