debugsidedata: small doc improvement...
marmoute
r43406:ba5b062a default
@@ -1,4263 +1,4265
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 getattr,
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 changegroup,
42 42 cmdutil,
43 43 color,
44 44 context,
45 45 copies,
46 46 dagparser,
47 47 encoding,
48 48 error,
49 49 exchange,
50 50 extensions,
51 51 filemerge,
52 52 filesetlang,
53 53 formatter,
54 54 hg,
55 55 httppeer,
56 56 localrepo,
57 57 lock as lockmod,
58 58 logcmdutil,
59 59 merge as mergemod,
60 60 obsolete,
61 61 obsutil,
62 62 phases,
63 63 policy,
64 64 pvec,
65 65 pycompat,
66 66 registrar,
67 67 repair,
68 68 revlog,
69 69 revset,
70 70 revsetlang,
71 71 scmutil,
72 72 setdiscovery,
73 73 simplemerge,
74 74 sshpeer,
75 75 sslutil,
76 76 streamclone,
77 77 templater,
78 78 treediscovery,
79 79 upgrade,
80 80 url as urlmod,
81 81 util,
82 82 vfs as vfsmod,
83 83 wireprotoframing,
84 84 wireprotoserver,
85 85 wireprotov2peer,
86 86 )
87 87 from .utils import (
88 88 cborutil,
89 89 compression,
90 90 dateutil,
91 91 procutil,
92 92 stringutil,
93 93 )
94 94
95 95 from .revlogutils import deltas as deltautil
96 96
97 97 release = lockmod.release
98 98
99 99 command = registrar.command()
100 100
101 101
102 102 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
103 103 def debugancestor(ui, repo, *args):
104 104 """find the ancestor revision of two revisions in a given index"""
105 105 if len(args) == 3:
106 106 index, rev1, rev2 = args
107 107 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
108 108 lookup = r.lookup
109 109 elif len(args) == 2:
110 110 if not repo:
111 111 raise error.Abort(
112 112 _(b'there is no Mercurial repository here (.hg not found)')
113 113 )
114 114 rev1, rev2 = args
115 115 r = repo.changelog
116 116 lookup = repo.lookup
117 117 else:
118 118 raise error.Abort(_(b'either two or three arguments required'))
119 119 a = r.ancestor(lookup(rev1), lookup(rev2))
120 120 ui.write(b'%d:%s\n' % (r.rev(a), hex(a)))
121 121
122 122
123 123 @command(b'debugapplystreamclonebundle', [], b'FILE')
124 124 def debugapplystreamclonebundle(ui, repo, fname):
125 125 """apply a stream clone bundle file"""
126 126 f = hg.openpath(ui, fname)
127 127 gen = exchange.readbundle(ui, f, fname)
128 128 gen.apply(repo)
129 129
130 130
131 131 @command(
132 132 b'debugbuilddag',
133 133 [
134 134 (
135 135 b'm',
136 136 b'mergeable-file',
137 137 None,
138 138 _(b'add single file mergeable changes'),
139 139 ),
140 140 (
141 141 b'o',
142 142 b'overwritten-file',
143 143 None,
144 144 _(b'add single file all revs overwrite'),
145 145 ),
146 146 (b'n', b'new-file', None, _(b'add new file at each rev')),
147 147 ],
148 148 _(b'[OPTION]... [TEXT]'),
149 149 )
150 150 def debugbuilddag(
151 151 ui,
152 152 repo,
153 153 text=None,
154 154 mergeable_file=False,
155 155 overwritten_file=False,
156 156 new_file=False,
157 157 ):
158 158 """builds a repo with a given DAG from scratch in the current empty repo
159 159
160 160 The description of the DAG is read from stdin if not given on the
161 161 command line.
162 162
163 163 Elements:
164 164
165 165 - "+n" is a linear run of n nodes based on the current default parent
166 166 - "." is a single node based on the current default parent
167 167 - "$" resets the default parent to null (implied at the start);
168 168 otherwise the default parent is always the last node created
169 169 - "<p" sets the default parent to the backref p
170 170 - "*p" is a fork at parent p, which is a backref
171 171 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
172 172 - "/p2" is a merge of the preceding node and p2
173 173 - ":tag" defines a local tag for the preceding node
174 174 - "@branch" sets the named branch for subsequent nodes
175 175 - "#...\\n" is a comment up to the end of the line
176 176
177 177 Whitespace between the above elements is ignored.
178 178
179 179 A backref is either
180 180
181 181 - a number n, which references the node curr-n, where curr is the current
182 182 node, or
183 183 - the name of a local tag you placed earlier using ":tag", or
184 184 - empty to denote the default parent.
185 185
186 186 All string-valued elements must either be strictly alphanumeric or be
187 187 enclosed in double quotes ("..."), with "\\" as the escape character.
188 188 """
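# Editor's note: an illustrative sketch, not part of the upstream file; the tag
# and branch names below are made up. Using the elements documented above:
#
#   hg debugbuilddag '+3 :base $ +2 /base @stable +1'
#
# builds three linear nodes, tags the third as "base", resets the default
# parent, adds two nodes forming a new root, merges the second of those with
# "base", and finishes with one node on the named branch "stable".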
189 189
190 190 if text is None:
191 191 ui.status(_(b"reading DAG from stdin\n"))
192 192 text = ui.fin.read()
193 193
194 194 cl = repo.changelog
195 195 if len(cl) > 0:
196 196 raise error.Abort(_(b'repository is not empty'))
197 197
198 198 # determine number of revs in DAG
199 199 total = 0
200 200 for type, data in dagparser.parsedag(text):
201 201 if type == b'n':
202 202 total += 1
203 203
204 204 if mergeable_file:
205 205 linesperrev = 2
206 206 # make a file with k lines per rev
207 207 initialmergedlines = [
208 208 b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
209 209 ]
210 210 initialmergedlines.append(b"")
211 211
212 212 tags = []
213 213 progress = ui.makeprogress(
214 214 _(b'building'), unit=_(b'revisions'), total=total
215 215 )
216 216 with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
217 217 at = -1
218 218 atbranch = b'default'
219 219 nodeids = []
220 220 id = 0
221 221 progress.update(id)
222 222 for type, data in dagparser.parsedag(text):
223 223 if type == b'n':
224 224 ui.note((b'node %s\n' % pycompat.bytestr(data)))
225 225 id, ps = data
226 226
227 227 files = []
228 228 filecontent = {}
229 229
230 230 p2 = None
231 231 if mergeable_file:
232 232 fn = b"mf"
233 233 p1 = repo[ps[0]]
234 234 if len(ps) > 1:
235 235 p2 = repo[ps[1]]
236 236 pa = p1.ancestor(p2)
237 237 base, local, other = [
238 238 x[fn].data() for x in (pa, p1, p2)
239 239 ]
240 240 m3 = simplemerge.Merge3Text(base, local, other)
241 241 ml = [l.strip() for l in m3.merge_lines()]
242 242 ml.append(b"")
243 243 elif at > 0:
244 244 ml = p1[fn].data().split(b"\n")
245 245 else:
246 246 ml = initialmergedlines
247 247 ml[id * linesperrev] += b" r%i" % id
248 248 mergedtext = b"\n".join(ml)
249 249 files.append(fn)
250 250 filecontent[fn] = mergedtext
251 251
252 252 if overwritten_file:
253 253 fn = b"of"
254 254 files.append(fn)
255 255 filecontent[fn] = b"r%i\n" % id
256 256
257 257 if new_file:
258 258 fn = b"nf%i" % id
259 259 files.append(fn)
260 260 filecontent[fn] = b"r%i\n" % id
261 261 if len(ps) > 1:
262 262 if not p2:
263 263 p2 = repo[ps[1]]
264 264 for fn in p2:
265 265 if fn.startswith(b"nf"):
266 266 files.append(fn)
267 267 filecontent[fn] = p2[fn].data()
268 268
269 269 def fctxfn(repo, cx, path):
270 270 if path in filecontent:
271 271 return context.memfilectx(
272 272 repo, cx, path, filecontent[path]
273 273 )
274 274 return None
275 275
276 276 if len(ps) == 0 or ps[0] < 0:
277 277 pars = [None, None]
278 278 elif len(ps) == 1:
279 279 pars = [nodeids[ps[0]], None]
280 280 else:
281 281 pars = [nodeids[p] for p in ps]
282 282 cx = context.memctx(
283 283 repo,
284 284 pars,
285 285 b"r%i" % id,
286 286 files,
287 287 fctxfn,
288 288 date=(id, 0),
289 289 user=b"debugbuilddag",
290 290 extra={b'branch': atbranch},
291 291 )
292 292 nodeid = repo.commitctx(cx)
293 293 nodeids.append(nodeid)
294 294 at = id
295 295 elif type == b'l':
296 296 id, name = data
297 297 ui.note((b'tag %s\n' % name))
298 298 tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
299 299 elif type == b'a':
300 300 ui.note((b'branch %s\n' % data))
301 301 atbranch = data
302 302 progress.update(id)
303 303
304 304 if tags:
305 305 repo.vfs.write(b"localtags", b"".join(tags))
306 306
307 307
308 308 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
309 309 indent_string = b' ' * indent
310 310 if all:
311 311 ui.writenoi18n(
312 312 b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
313 313 % indent_string
314 314 )
315 315
316 316 def showchunks(named):
317 317 ui.write(b"\n%s%s\n" % (indent_string, named))
318 318 for deltadata in gen.deltaiter():
319 319 node, p1, p2, cs, deltabase, delta, flags = deltadata
320 320 ui.write(
321 321 b"%s%s %s %s %s %s %d\n"
322 322 % (
323 323 indent_string,
324 324 hex(node),
325 325 hex(p1),
326 326 hex(p2),
327 327 hex(cs),
328 328 hex(deltabase),
329 329 len(delta),
330 330 )
331 331 )
332 332
333 333 chunkdata = gen.changelogheader()
334 334 showchunks(b"changelog")
335 335 chunkdata = gen.manifestheader()
336 336 showchunks(b"manifest")
337 337 for chunkdata in iter(gen.filelogheader, {}):
338 338 fname = chunkdata[b'filename']
339 339 showchunks(fname)
340 340 else:
341 341 if isinstance(gen, bundle2.unbundle20):
342 342 raise error.Abort(_(b'use debugbundle2 for this file'))
343 343 chunkdata = gen.changelogheader()
344 344 for deltadata in gen.deltaiter():
345 345 node, p1, p2, cs, deltabase, delta, flags = deltadata
346 346 ui.write(b"%s%s\n" % (indent_string, hex(node)))
347 347
348 348
349 349 def _debugobsmarkers(ui, part, indent=0, **opts):
350 350 """display version and markers contained in 'data'"""
351 351 opts = pycompat.byteskwargs(opts)
352 352 data = part.read()
353 353 indent_string = b' ' * indent
354 354 try:
355 355 version, markers = obsolete._readmarkers(data)
356 356 except error.UnknownVersion as exc:
357 357 msg = b"%sunsupported version: %s (%d bytes)\n"
358 358 msg %= indent_string, exc.version, len(data)
359 359 ui.write(msg)
360 360 else:
361 361 msg = b"%sversion: %d (%d bytes)\n"
362 362 msg %= indent_string, version, len(data)
363 363 ui.write(msg)
364 364 fm = ui.formatter(b'debugobsolete', opts)
365 365 for rawmarker in sorted(markers):
366 366 m = obsutil.marker(None, rawmarker)
367 367 fm.startitem()
368 368 fm.plain(indent_string)
369 369 cmdutil.showmarker(fm, m)
370 370 fm.end()
371 371
372 372
373 373 def _debugphaseheads(ui, data, indent=0):
374 374 """display the phase heads contained in 'data'"""
375 375 indent_string = b' ' * indent
376 376 headsbyphase = phases.binarydecode(data)
377 377 for phase in phases.allphases:
378 378 for head in headsbyphase[phase]:
379 379 ui.write(indent_string)
380 380 ui.write(b'%s %s\n' % (hex(head), phases.phasenames[phase]))
381 381
382 382
383 383 def _quasirepr(thing):
384 384 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
385 385 return b'{%s}' % (
386 386 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
387 387 )
388 388 return pycompat.bytestr(repr(thing))
389 389
390 390
391 391 def _debugbundle2(ui, gen, all=None, **opts):
392 392 """lists the contents of a bundle2"""
393 393 if not isinstance(gen, bundle2.unbundle20):
394 394 raise error.Abort(_(b'not a bundle2 file'))
395 395 ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
396 396 parttypes = opts.get(r'part_type', [])
397 397 for part in gen.iterparts():
398 398 if parttypes and part.type not in parttypes:
399 399 continue
400 400 msg = b'%s -- %s (mandatory: %r)\n'
401 401 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
402 402 if part.type == b'changegroup':
403 403 version = part.params.get(b'version', b'01')
404 404 cg = changegroup.getunbundler(version, part, b'UN')
405 405 if not ui.quiet:
406 406 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
407 407 if part.type == b'obsmarkers':
408 408 if not ui.quiet:
409 409 _debugobsmarkers(ui, part, indent=4, **opts)
410 410 if part.type == b'phase-heads':
411 411 if not ui.quiet:
412 412 _debugphaseheads(ui, part, indent=4)
413 413
414 414
415 415 @command(
416 416 b'debugbundle',
417 417 [
418 418 (b'a', b'all', None, _(b'show all details')),
419 419 (b'', b'part-type', [], _(b'show only the named part type')),
420 420 (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
421 421 ],
422 422 _(b'FILE'),
423 423 norepo=True,
424 424 )
425 425 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
426 426 """lists the contents of a bundle"""
427 427 with hg.openpath(ui, bundlepath) as f:
428 428 if spec:
429 429 spec = exchange.getbundlespec(ui, f)
430 430 ui.write(b'%s\n' % spec)
431 431 return
432 432
433 433 gen = exchange.readbundle(ui, f, bundlepath)
434 434 if isinstance(gen, bundle2.unbundle20):
435 435 return _debugbundle2(ui, gen, all=all, **opts)
436 436 _debugchangegroup(ui, gen, all=all, **opts)
437 437
438 438
439 439 @command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
440 440 def debugcapabilities(ui, path, **opts):
441 441 """lists the capabilities of a remote peer"""
442 442 opts = pycompat.byteskwargs(opts)
443 443 peer = hg.peer(ui, opts, path)
444 444 caps = peer.capabilities()
445 445 ui.writenoi18n(b'Main capabilities:\n')
446 446 for c in sorted(caps):
447 447 ui.write(b' %s\n' % c)
448 448 b2caps = bundle2.bundle2caps(peer)
449 449 if b2caps:
450 450 ui.writenoi18n(b'Bundle2 capabilities:\n')
451 451 for key, values in sorted(pycompat.iteritems(b2caps)):
452 452 ui.write(b' %s\n' % key)
453 453 for v in values:
454 454 ui.write(b' %s\n' % v)
455 455
456 456
457 457 @command(b'debugcheckstate', [], b'')
458 458 def debugcheckstate(ui, repo):
459 459 """validate the correctness of the current dirstate"""
460 460 parent1, parent2 = repo.dirstate.parents()
461 461 m1 = repo[parent1].manifest()
462 462 m2 = repo[parent2].manifest()
463 463 errors = 0
464 464 for f in repo.dirstate:
465 465 state = repo.dirstate[f]
466 466 if state in b"nr" and f not in m1:
467 467 ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
468 468 errors += 1
469 469 if state in b"a" and f in m1:
470 470 ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
471 471 errors += 1
472 472 if state in b"m" and f not in m1 and f not in m2:
473 473 ui.warn(
474 474 _(b"%s in state %s, but not in either manifest\n") % (f, state)
475 475 )
476 476 errors += 1
477 477 for f in m1:
478 478 state = repo.dirstate[f]
479 479 if state not in b"nrm":
480 480 ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
481 481 errors += 1
482 482 if errors:
483 483 errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
484 484 raise error.Abort(errstr)
485 485
486 486
487 487 @command(
488 488 b'debugcolor',
489 489 [(b'', b'style', None, _(b'show all configured styles'))],
490 490 b'hg debugcolor',
491 491 )
492 492 def debugcolor(ui, repo, **opts):
493 493 """show available colors, effects or styles"""
494 494 ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
495 495 if opts.get(r'style'):
496 496 return _debugdisplaystyle(ui)
497 497 else:
498 498 return _debugdisplaycolor(ui)
499 499
500 500
501 501 def _debugdisplaycolor(ui):
502 502 ui = ui.copy()
503 503 ui._styles.clear()
504 504 for effect in color._activeeffects(ui).keys():
505 505 ui._styles[effect] = effect
506 506 if ui._terminfoparams:
507 507 for k, v in ui.configitems(b'color'):
508 508 if k.startswith(b'color.'):
509 509 ui._styles[k] = k[6:]
510 510 elif k.startswith(b'terminfo.'):
511 511 ui._styles[k] = k[9:]
512 512 ui.write(_(b'available colors:\n'))
513 513 # sort labels with '_' after the others to group the '_background' entries.
514 514 items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
515 515 for colorname, label in items:
516 516 ui.write(b'%s\n' % colorname, label=label)
517 517
518 518
519 519 def _debugdisplaystyle(ui):
520 520 ui.write(_(b'available style:\n'))
521 521 if not ui._styles:
522 522 return
523 523 width = max(len(s) for s in ui._styles)
524 524 for label, effects in sorted(ui._styles.items()):
525 525 ui.write(b'%s' % label, label=label)
526 526 if effects:
527 527 # 50
528 528 ui.write(b': ')
529 529 ui.write(b' ' * (max(0, width - len(label))))
530 530 ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
531 531 ui.write(b'\n')
532 532
533 533
534 534 @command(b'debugcreatestreamclonebundle', [], b'FILE')
535 535 def debugcreatestreamclonebundle(ui, repo, fname):
536 536 """create a stream clone bundle file
537 537
538 538 Stream bundles are special bundles that are essentially archives of
539 539 revlog files. They are commonly used for cloning very quickly.
540 540 """
541 541 # TODO we may want to turn this into an abort when this functionality
542 542 # is moved into `hg bundle`.
543 543 if phases.hassecret(repo):
544 544 ui.warn(
545 545 _(
546 546 b'(warning: stream clone bundle will contain secret '
547 547 b'revisions)\n'
548 548 )
549 549 )
550 550
551 551 requirements, gen = streamclone.generatebundlev1(repo)
552 552 changegroup.writechunks(ui, gen, fname)
553 553
554 554 ui.write(_(b'bundle requirements: %s\n') % b', '.join(sorted(requirements)))
555 555
556 556
557 557 @command(
558 558 b'debugdag',
559 559 [
560 560 (b't', b'tags', None, _(b'use tags as labels')),
561 561 (b'b', b'branches', None, _(b'annotate with branch names')),
562 562 (b'', b'dots', None, _(b'use dots for runs')),
563 563 (b's', b'spaces', None, _(b'separate elements by spaces')),
564 564 ],
565 565 _(b'[OPTION]... [FILE [REV]...]'),
566 566 optionalrepo=True,
567 567 )
568 568 def debugdag(ui, repo, file_=None, *revs, **opts):
569 569 """format the changelog or an index DAG as a concise textual description
570 570
571 571 If you pass a revlog index, the revlog's DAG is emitted. If you list
572 572 revision numbers, they get labeled in the output as rN.
573 573
574 574 Otherwise, the changelog DAG of the current repo is emitted.
575 575 """
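# Editor's note: illustrative invocations, not part of the upstream file.
#
#   hg debugdag -t -b      # changelog DAG with tag and branch annotations
#   hg debugdag -s --dots  # separate elements by spaces, use dots for runs
#
# Passing a revlog index as FILE emits that revlog's DAG instead, with any REV
# arguments labeled in the output as rN.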
576 576 spaces = opts.get(r'spaces')
577 577 dots = opts.get(r'dots')
578 578 if file_:
579 579 rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
580 580 revs = set((int(r) for r in revs))
581 581
582 582 def events():
583 583 for r in rlog:
584 584 yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
585 585 if r in revs:
586 586 yield b'l', (r, b"r%i" % r)
587 587
588 588 elif repo:
589 589 cl = repo.changelog
590 590 tags = opts.get(r'tags')
591 591 branches = opts.get(r'branches')
592 592 if tags:
593 593 labels = {}
594 594 for l, n in repo.tags().items():
595 595 labels.setdefault(cl.rev(n), []).append(l)
596 596
597 597 def events():
598 598 b = b"default"
599 599 for r in cl:
600 600 if branches:
601 601 newb = cl.read(cl.node(r))[5][b'branch']
602 602 if newb != b:
603 603 yield b'a', newb
604 604 b = newb
605 605 yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
606 606 if tags:
607 607 ls = labels.get(r)
608 608 if ls:
609 609 for l in ls:
610 610 yield b'l', (r, l)
611 611
612 612 else:
613 613 raise error.Abort(_(b'need repo for changelog dag'))
614 614
615 615 for line in dagparser.dagtextlines(
616 616 events(),
617 617 addspaces=spaces,
618 618 wraplabels=True,
619 619 wrapannotations=True,
620 620 wrapnonlinear=dots,
621 621 usedots=dots,
622 622 maxlinewidth=70,
623 623 ):
624 624 ui.write(line)
625 625 ui.write(b"\n")
626 626
627 627
628 628 @command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
629 629 def debugdata(ui, repo, file_, rev=None, **opts):
630 630 """dump the contents of a data file revision"""
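# Editor's note: hypothetical examples, not part of the upstream file; the file
# name and revision numbers are placeholders.
#
#   hg debugdata -c 0              # raw data of changelog revision 0
#   hg debugdata some/file.txt 2   # raw data of revision 2 of that filelog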
631 631 opts = pycompat.byteskwargs(opts)
632 632 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
633 633 if rev is not None:
634 634 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
635 635 file_, rev = None, file_
636 636 elif rev is None:
637 637 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
638 638 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
639 639 try:
640 640 ui.write(r.rawdata(r.lookup(rev)))
641 641 except KeyError:
642 642 raise error.Abort(_(b'invalid revision identifier %s') % rev)
643 643
644 644
645 645 @command(
646 646 b'debugdate',
647 647 [(b'e', b'extended', None, _(b'try extended date formats'))],
648 648 _(b'[-e] DATE [RANGE]'),
649 649 norepo=True,
650 650 optionalrepo=True,
651 651 )
652 652 def debugdate(ui, date, range=None, **opts):
653 653 """parse and display a date"""
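# Editor's note: hypothetical example, not part of the upstream file.
#
#   hg debugdate '2006-02-01 13:00:30'
#
# prints the parsed value as "internal: <unixtime> <tzoffset>" followed by the
# "standard:" rendering; with -e, util.extendeddateformats are also tried.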
654 654 if opts[r"extended"]:
655 655 d = dateutil.parsedate(date, util.extendeddateformats)
656 656 else:
657 657 d = dateutil.parsedate(date)
658 658 ui.writenoi18n(b"internal: %d %d\n" % d)
659 659 ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(d))
660 660 if range:
661 661 m = dateutil.matchdate(range)
662 662 ui.writenoi18n(b"match: %s\n" % m(d[0]))
663 663
664 664
665 665 @command(
666 666 b'debugdeltachain',
667 667 cmdutil.debugrevlogopts + cmdutil.formatteropts,
668 668 _(b'-c|-m|FILE'),
669 669 optionalrepo=True,
670 670 )
671 671 def debugdeltachain(ui, repo, file_=None, **opts):
672 672 """dump information about delta chains in a revlog
673 673
674 674 Output can be templatized. Available template keywords are:
675 675
676 676 :``rev``: revision number
677 677 :``chainid``: delta chain identifier (numbered by unique base)
678 678 :``chainlen``: delta chain length to this revision
679 679 :``prevrev``: previous revision in delta chain
680 680 :``deltatype``: role of delta / how it was computed
681 681 :``compsize``: compressed size of revision
682 682 :``uncompsize``: uncompressed size of revision
683 683 :``chainsize``: total size of compressed revisions in chain
684 684 :``chainratio``: total chain size divided by uncompressed revision size
685 685 (new delta chains typically start at ratio 2.00)
686 686 :``lindist``: linear distance from base revision in delta chain to end
687 687 of this revision
688 688 :``extradist``: total size of revisions not part of this delta chain from
689 689 base of delta chain to end of this revision; a measurement
690 690 of how much extra data we need to read/seek across to read
691 691 the delta chain for this revision
692 692 :``extraratio``: extradist divided by chainsize; another representation of
693 693 how much unrelated data is needed to load this delta chain
694 694
695 695 If the repository is configured to use sparse reads, additional keywords
696 696 are available:
697 697
698 698 :``readsize``: total size of data read from the disk for a revision
699 699 (sum of the sizes of all the blocks)
700 700 :``largestblock``: size of the largest block of data read from the disk
701 701 :``readdensity``: density of useful bytes in the data read from the disk
702 702 :``srchunks``: in how many data hunks the whole revision would be read
703 703
704 704 Sparse reads can be enabled with experimental.sparse-read = True
705 705 """
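# Editor's note: a hypothetical templated invocation, not part of the upstream
# file; -T comes from the shared cmdutil.formatteropts used by this command.
#
#   hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype} {chainratio}\n'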
706 706 opts = pycompat.byteskwargs(opts)
707 707 r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
708 708 index = r.index
709 709 start = r.start
710 710 length = r.length
711 711 generaldelta = r.version & revlog.FLAG_GENERALDELTA
712 712 withsparseread = getattr(r, '_withsparseread', False)
713 713
714 714 def revinfo(rev):
715 715 e = index[rev]
716 716 compsize = e[1]
717 717 uncompsize = e[2]
718 718 chainsize = 0
719 719
720 720 if generaldelta:
721 721 if e[3] == e[5]:
722 722 deltatype = b'p1'
723 723 elif e[3] == e[6]:
724 724 deltatype = b'p2'
725 725 elif e[3] == rev - 1:
726 726 deltatype = b'prev'
727 727 elif e[3] == rev:
728 728 deltatype = b'base'
729 729 else:
730 730 deltatype = b'other'
731 731 else:
732 732 if e[3] == rev:
733 733 deltatype = b'base'
734 734 else:
735 735 deltatype = b'prev'
736 736
737 737 chain = r._deltachain(rev)[0]
738 738 for iterrev in chain:
739 739 e = index[iterrev]
740 740 chainsize += e[1]
741 741
742 742 return compsize, uncompsize, deltatype, chain, chainsize
743 743
744 744 fm = ui.formatter(b'debugdeltachain', opts)
745 745
746 746 fm.plain(
747 747 b' rev chain# chainlen prev delta '
748 748 b'size rawsize chainsize ratio lindist extradist '
749 749 b'extraratio'
750 750 )
751 751 if withsparseread:
752 752 fm.plain(b' readsize largestblk rddensity srchunks')
753 753 fm.plain(b'\n')
754 754
755 755 chainbases = {}
756 756 for rev in r:
757 757 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
758 758 chainbase = chain[0]
759 759 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
760 760 basestart = start(chainbase)
761 761 revstart = start(rev)
762 762 lineardist = revstart + comp - basestart
763 763 extradist = lineardist - chainsize
764 764 try:
765 765 prevrev = chain[-2]
766 766 except IndexError:
767 767 prevrev = -1
768 768
769 769 if uncomp != 0:
770 770 chainratio = float(chainsize) / float(uncomp)
771 771 else:
772 772 chainratio = chainsize
773 773
774 774 if chainsize != 0:
775 775 extraratio = float(extradist) / float(chainsize)
776 776 else:
777 777 extraratio = extradist
778 778
779 779 fm.startitem()
780 780 fm.write(
781 781 b'rev chainid chainlen prevrev deltatype compsize '
782 782 b'uncompsize chainsize chainratio lindist extradist '
783 783 b'extraratio',
784 784 b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
785 785 rev,
786 786 chainid,
787 787 len(chain),
788 788 prevrev,
789 789 deltatype,
790 790 comp,
791 791 uncomp,
792 792 chainsize,
793 793 chainratio,
794 794 lineardist,
795 795 extradist,
796 796 extraratio,
797 797 rev=rev,
798 798 chainid=chainid,
799 799 chainlen=len(chain),
800 800 prevrev=prevrev,
801 801 deltatype=deltatype,
802 802 compsize=comp,
803 803 uncompsize=uncomp,
804 804 chainsize=chainsize,
805 805 chainratio=chainratio,
806 806 lindist=lineardist,
807 807 extradist=extradist,
808 808 extraratio=extraratio,
809 809 )
810 810 if withsparseread:
811 811 readsize = 0
812 812 largestblock = 0
813 813 srchunks = 0
814 814
815 815 for revschunk in deltautil.slicechunk(r, chain):
816 816 srchunks += 1
817 817 blkend = start(revschunk[-1]) + length(revschunk[-1])
818 818 blksize = blkend - start(revschunk[0])
819 819
820 820 readsize += blksize
821 821 if largestblock < blksize:
822 822 largestblock = blksize
823 823
824 824 if readsize:
825 825 readdensity = float(chainsize) / float(readsize)
826 826 else:
827 827 readdensity = 1
828 828
829 829 fm.write(
830 830 b'readsize largestblock readdensity srchunks',
831 831 b' %10d %10d %9.5f %8d',
832 832 readsize,
833 833 largestblock,
834 834 readdensity,
835 835 srchunks,
836 836 readsize=readsize,
837 837 largestblock=largestblock,
838 838 readdensity=readdensity,
839 839 srchunks=srchunks,
840 840 )
841 841
842 842 fm.plain(b'\n')
843 843
844 844 fm.end()
845 845
846 846
847 847 @command(
848 848 b'debugdirstate|debugstate',
849 849 [
850 850 (
851 851 b'',
852 852 b'nodates',
853 853 None,
854 854 _(b'do not display the saved mtime (DEPRECATED)'),
855 855 ),
856 856 (b'', b'dates', True, _(b'display the saved mtime')),
857 857 (b'', b'datesort', None, _(b'sort by saved mtime')),
858 858 ],
859 859 _(b'[OPTION]...'),
860 860 )
861 861 def debugstate(ui, repo, **opts):
862 862 """show the contents of the current dirstate"""
863 863
864 864 nodates = not opts[r'dates']
865 865 if opts.get(r'nodates') is not None:
866 866 nodates = True
867 867 datesort = opts.get(r'datesort')
868 868
869 869 if datesort:
870 870 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
871 871 else:
872 872 keyfunc = None # sort by filename
873 873 for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
874 874 if ent[3] == -1:
875 875 timestr = b'unset '
876 876 elif nodates:
877 877 timestr = b'set '
878 878 else:
879 879 timestr = time.strftime(
880 880 r"%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
881 881 )
882 882 timestr = encoding.strtolocal(timestr)
883 883 if ent[1] & 0o20000:
884 884 mode = b'lnk'
885 885 else:
886 886 mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
887 887 ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
888 888 for f in repo.dirstate.copies():
889 889 ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
890 890
891 891
892 892 @command(
893 893 b'debugdiscovery',
894 894 [
895 895 (b'', b'old', None, _(b'use old-style discovery')),
896 896 (
897 897 b'',
898 898 b'nonheads',
899 899 None,
900 900 _(b'use old-style discovery with non-heads included'),
901 901 ),
902 902 (b'', b'rev', [], b'restrict discovery to this set of revs'),
903 903 (b'', b'seed', b'12323', b'specify the random seed used for discovery'),
904 904 ]
905 905 + cmdutil.remoteopts,
906 906 _(b'[--rev REV] [OTHER]'),
907 907 )
908 908 def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
909 909 """runs the changeset discovery protocol in isolation"""
910 910 opts = pycompat.byteskwargs(opts)
911 911 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
912 912 remote = hg.peer(repo, opts, remoteurl)
913 913 ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))
914 914
915 915 # make sure tests are repeatable
916 916 random.seed(int(opts[b'seed']))
917 917
918 918 if opts.get(b'old'):
919 919
920 920 def doit(pushedrevs, remoteheads, remote=remote):
921 921 if not util.safehasattr(remote, b'branches'):
922 922 # enable in-client legacy support
923 923 remote = localrepo.locallegacypeer(remote.local())
924 924 common, _in, hds = treediscovery.findcommonincoming(
925 925 repo, remote, force=True
926 926 )
927 927 common = set(common)
928 928 if not opts.get(b'nonheads'):
929 929 ui.writenoi18n(
930 930 b"unpruned common: %s\n"
931 931 % b" ".join(sorted(short(n) for n in common))
932 932 )
933 933
934 934 clnode = repo.changelog.node
935 935 common = repo.revs(b'heads(::%ln)', common)
936 936 common = {clnode(r) for r in common}
937 937 return common, hds
938 938
939 939 else:
940 940
941 941 def doit(pushedrevs, remoteheads, remote=remote):
942 942 nodes = None
943 943 if pushedrevs:
944 944 revs = scmutil.revrange(repo, pushedrevs)
945 945 nodes = [repo[r].node() for r in revs]
946 946 common, any, hds = setdiscovery.findcommonheads(
947 947 ui, repo, remote, ancestorsof=nodes
948 948 )
949 949 return common, hds
950 950
951 951 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
952 952 localrevs = opts[b'rev']
953 953 with util.timedcm(b'debug-discovery') as t:
954 954 common, hds = doit(localrevs, remoterevs)
955 955
956 956 # compute all statistics
957 957 common = set(common)
958 958 rheads = set(hds)
959 959 lheads = set(repo.heads())
960 960
961 961 data = {}
962 962 data[b'elapsed'] = t.elapsed
963 963 data[b'nb-common'] = len(common)
964 964 data[b'nb-common-local'] = len(common & lheads)
965 965 data[b'nb-common-remote'] = len(common & rheads)
966 966 data[b'nb-common-both'] = len(common & rheads & lheads)
967 967 data[b'nb-local'] = len(lheads)
968 968 data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
969 969 data[b'nb-remote'] = len(rheads)
970 970 data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
971 971 data[b'nb-revs'] = len(repo.revs(b'all()'))
972 972 data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
973 973 data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
974 974
975 975 # display discovery summary
976 976 ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
977 977 ui.writenoi18n(b"heads summary:\n")
978 978 ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
979 979 ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
980 980 ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
981 981 ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
982 982 ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
983 983 ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
984 984 ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
985 985 ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
986 986 ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
987 987 ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
988 988 ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
989 989 ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
990 990 ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)
991 991
992 992 if ui.verbose:
993 993 ui.writenoi18n(
994 994 b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
995 995 )
996 996
997 997
998 998 _chunksize = 4 << 10
999 999
1000 1000
1001 1001 @command(
1002 1002 b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
1003 1003 )
1004 1004 def debugdownload(ui, repo, url, output=None, **opts):
1005 1005 """download a resource using Mercurial logic and config
1006 1006 """
1007 1007 fh = urlmod.open(ui, url, output)
1008 1008
1009 1009 dest = ui
1010 1010 if output:
1011 1011 dest = open(output, b"wb", _chunksize)
1012 1012 try:
1013 1013 data = fh.read(_chunksize)
1014 1014 while data:
1015 1015 dest.write(data)
1016 1016 data = fh.read(_chunksize)
1017 1017 finally:
1018 1018 if output:
1019 1019 dest.close()
1020 1020
1021 1021
1022 1022 @command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
1023 1023 def debugextensions(ui, repo, **opts):
1024 1024 '''show information about active extensions'''
1025 1025 opts = pycompat.byteskwargs(opts)
1026 1026 exts = extensions.extensions(ui)
1027 1027 hgver = util.version()
1028 1028 fm = ui.formatter(b'debugextensions', opts)
1029 1029 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
1030 1030 isinternal = extensions.ismoduleinternal(extmod)
1031 1031 extsource = pycompat.fsencode(extmod.__file__)
1032 1032 if isinternal:
1033 1033 exttestedwith = [] # never expose magic string to users
1034 1034 else:
1035 1035 exttestedwith = getattr(extmod, 'testedwith', b'').split()
1036 1036 extbuglink = getattr(extmod, 'buglink', None)
1037 1037
1038 1038 fm.startitem()
1039 1039
1040 1040 if ui.quiet or ui.verbose:
1041 1041 fm.write(b'name', b'%s\n', extname)
1042 1042 else:
1043 1043 fm.write(b'name', b'%s', extname)
1044 1044 if isinternal or hgver in exttestedwith:
1045 1045 fm.plain(b'\n')
1046 1046 elif not exttestedwith:
1047 1047 fm.plain(_(b' (untested!)\n'))
1048 1048 else:
1049 1049 lasttestedversion = exttestedwith[-1]
1050 1050 fm.plain(b' (%s!)\n' % lasttestedversion)
1051 1051
1052 1052 fm.condwrite(
1053 1053 ui.verbose and extsource,
1054 1054 b'source',
1055 1055 _(b' location: %s\n'),
1056 1056 extsource or b"",
1057 1057 )
1058 1058
1059 1059 if ui.verbose:
1060 1060 fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
1061 1061 fm.data(bundled=isinternal)
1062 1062
1063 1063 fm.condwrite(
1064 1064 ui.verbose and exttestedwith,
1065 1065 b'testedwith',
1066 1066 _(b' tested with: %s\n'),
1067 1067 fm.formatlist(exttestedwith, name=b'ver'),
1068 1068 )
1069 1069
1070 1070 fm.condwrite(
1071 1071 ui.verbose and extbuglink,
1072 1072 b'buglink',
1073 1073 _(b' bug reporting: %s\n'),
1074 1074 extbuglink or b"",
1075 1075 )
1076 1076
1077 1077 fm.end()
1078 1078
1079 1079
1080 1080 @command(
1081 1081 b'debugfileset',
1082 1082 [
1083 1083 (
1084 1084 b'r',
1085 1085 b'rev',
1086 1086 b'',
1087 1087 _(b'apply the filespec on this revision'),
1088 1088 _(b'REV'),
1089 1089 ),
1090 1090 (
1091 1091 b'',
1092 1092 b'all-files',
1093 1093 False,
1094 1094 _(b'test files from all revisions and working directory'),
1095 1095 ),
1096 1096 (
1097 1097 b's',
1098 1098 b'show-matcher',
1099 1099 None,
1100 1100 _(b'print internal representation of matcher'),
1101 1101 ),
1102 1102 (
1103 1103 b'p',
1104 1104 b'show-stage',
1105 1105 [],
1106 1106 _(b'print parsed tree at the given stage'),
1107 1107 _(b'NAME'),
1108 1108 ),
1109 1109 ],
1110 1110 _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
1111 1111 )
1112 1112 def debugfileset(ui, repo, expr, **opts):
1113 1113 '''parse and apply a fileset specification'''
1114 1114 from . import fileset
1115 1115
1116 1116 fileset.symbols # force import of fileset so we have predicates to optimize
1117 1117 opts = pycompat.byteskwargs(opts)
1118 1118 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
1119 1119
1120 1120 stages = [
1121 1121 (b'parsed', pycompat.identity),
1122 1122 (b'analyzed', filesetlang.analyze),
1123 1123 (b'optimized', filesetlang.optimize),
1124 1124 ]
1125 1125 stagenames = set(n for n, f in stages)
1126 1126
1127 1127 showalways = set()
1128 1128 if ui.verbose and not opts[b'show_stage']:
1129 1129 # show parsed tree by --verbose (deprecated)
1130 1130 showalways.add(b'parsed')
1131 1131 if opts[b'show_stage'] == [b'all']:
1132 1132 showalways.update(stagenames)
1133 1133 else:
1134 1134 for n in opts[b'show_stage']:
1135 1135 if n not in stagenames:
1136 1136 raise error.Abort(_(b'invalid stage name: %s') % n)
1137 1137 showalways.update(opts[b'show_stage'])
1138 1138
1139 1139 tree = filesetlang.parse(expr)
1140 1140 for n, f in stages:
1141 1141 tree = f(tree)
1142 1142 if n in showalways:
1143 1143 if opts[b'show_stage'] or n != b'parsed':
1144 1144 ui.write(b"* %s:\n" % n)
1145 1145 ui.write(filesetlang.prettyformat(tree), b"\n")
1146 1146
1147 1147 files = set()
1148 1148 if opts[b'all_files']:
1149 1149 for r in repo:
1150 1150 c = repo[r]
1151 1151 files.update(c.files())
1152 1152 files.update(c.substate)
1153 1153 if opts[b'all_files'] or ctx.rev() is None:
1154 1154 wctx = repo[None]
1155 1155 files.update(
1156 1156 repo.dirstate.walk(
1157 1157 scmutil.matchall(repo),
1158 1158 subrepos=list(wctx.substate),
1159 1159 unknown=True,
1160 1160 ignored=True,
1161 1161 )
1162 1162 )
1163 1163 files.update(wctx.substate)
1164 1164 else:
1165 1165 files.update(ctx.files())
1166 1166 files.update(ctx.substate)
1167 1167
1168 1168 m = ctx.matchfileset(expr)
1169 1169 if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
1170 1170 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
1171 1171 for f in sorted(files):
1172 1172 if not m(f):
1173 1173 continue
1174 1174 ui.write(b"%s\n" % f)
1175 1175
1176 1176
1177 1177 @command(b'debugformat', [] + cmdutil.formatteropts)
1178 1178 def debugformat(ui, repo, **opts):
1179 1179 """display format information about the current repository
1180 1180
1181 1181 Use --verbose to get extra information about the current config value and
1182 1182 the Mercurial default."""
1183 1183 opts = pycompat.byteskwargs(opts)
1184 1184 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1185 1185 maxvariantlength = max(len(b'format-variant'), maxvariantlength)
1186 1186
1187 1187 def makeformatname(name):
1188 1188 return b'%s:' + (b' ' * (maxvariantlength - len(name)))
1189 1189
1190 1190 fm = ui.formatter(b'debugformat', opts)
1191 1191 if fm.isplain():
1192 1192
1193 1193 def formatvalue(value):
1194 1194 if util.safehasattr(value, b'startswith'):
1195 1195 return value
1196 1196 if value:
1197 1197 return b'yes'
1198 1198 else:
1199 1199 return b'no'
1200 1200
1201 1201 else:
1202 1202 formatvalue = pycompat.identity
1203 1203
1204 1204 fm.plain(b'format-variant')
1205 1205 fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
1206 1206 fm.plain(b' repo')
1207 1207 if ui.verbose:
1208 1208 fm.plain(b' config default')
1209 1209 fm.plain(b'\n')
1210 1210 for fv in upgrade.allformatvariant:
1211 1211 fm.startitem()
1212 1212 repovalue = fv.fromrepo(repo)
1213 1213 configvalue = fv.fromconfig(repo)
1214 1214
1215 1215 if repovalue != configvalue:
1216 1216 namelabel = b'formatvariant.name.mismatchconfig'
1217 1217 repolabel = b'formatvariant.repo.mismatchconfig'
1218 1218 elif repovalue != fv.default:
1219 1219 namelabel = b'formatvariant.name.mismatchdefault'
1220 1220 repolabel = b'formatvariant.repo.mismatchdefault'
1221 1221 else:
1222 1222 namelabel = b'formatvariant.name.uptodate'
1223 1223 repolabel = b'formatvariant.repo.uptodate'
1224 1224
1225 1225 fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
1226 1226 fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
1227 1227 if fv.default != configvalue:
1228 1228 configlabel = b'formatvariant.config.special'
1229 1229 else:
1230 1230 configlabel = b'formatvariant.config.default'
1231 1231 fm.condwrite(
1232 1232 ui.verbose,
1233 1233 b'config',
1234 1234 b' %6s',
1235 1235 formatvalue(configvalue),
1236 1236 label=configlabel,
1237 1237 )
1238 1238 fm.condwrite(
1239 1239 ui.verbose,
1240 1240 b'default',
1241 1241 b' %7s',
1242 1242 formatvalue(fv.default),
1243 1243 label=b'formatvariant.default',
1244 1244 )
1245 1245 fm.plain(b'\n')
1246 1246 fm.end()
1247 1247
1248 1248
1249 1249 @command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
1250 1250 def debugfsinfo(ui, path=b"."):
1251 1251 """show information detected about current filesystem"""
1252 1252 ui.writenoi18n(b'path: %s\n' % path)
1253 1253 ui.writenoi18n(
1254 1254 b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
1255 1255 )
1256 1256 ui.writenoi18n(b'exec: %s\n' % (util.checkexec(path) and b'yes' or b'no'))
1257 1257 ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
1258 1258 ui.writenoi18n(
1259 1259 b'symlink: %s\n' % (util.checklink(path) and b'yes' or b'no')
1260 1260 )
1261 1261 ui.writenoi18n(
1262 1262 b'hardlink: %s\n' % (util.checknlink(path) and b'yes' or b'no')
1263 1263 )
1264 1264 casesensitive = b'(unknown)'
1265 1265 try:
1266 1266 with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
1267 1267 casesensitive = util.fscasesensitive(f.name) and b'yes' or b'no'
1268 1268 except OSError:
1269 1269 pass
1270 1270 ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1271 1271
1272 1272
1273 1273 @command(
1274 1274 b'debuggetbundle',
1275 1275 [
1276 1276 (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
1277 1277 (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
1278 1278 (
1279 1279 b't',
1280 1280 b'type',
1281 1281 b'bzip2',
1282 1282 _(b'bundle compression type to use'),
1283 1283 _(b'TYPE'),
1284 1284 ),
1285 1285 ],
1286 1286 _(b'REPO FILE [-H|-C ID]...'),
1287 1287 norepo=True,
1288 1288 )
1289 1289 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1290 1290 """retrieves a bundle from a repo
1291 1291
1292 1292 Every ID must be a full-length hex node id string. Saves the bundle to the
1293 1293 given file.
1294 1294 """
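# Editor's note: hypothetical example, not part of the upstream file; the URL,
# output file name and head id are placeholders.
#
#   hg debuggetbundle https://example.org/repo out.hg -H <40-hex-head-id> -t gzip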
1295 1295 opts = pycompat.byteskwargs(opts)
1296 1296 repo = hg.peer(ui, opts, repopath)
1297 1297 if not repo.capable(b'getbundle'):
1298 1298 raise error.Abort(b"getbundle() not supported by target repository")
1299 1299 args = {}
1300 1300 if common:
1301 1301 args[r'common'] = [bin(s) for s in common]
1302 1302 if head:
1303 1303 args[r'heads'] = [bin(s) for s in head]
1304 1304 # TODO: get desired bundlecaps from command line.
1305 1305 args[r'bundlecaps'] = None
1306 1306 bundle = repo.getbundle(b'debug', **args)
1307 1307
1308 1308 bundletype = opts.get(b'type', b'bzip2').lower()
1309 1309 btypes = {
1310 1310 b'none': b'HG10UN',
1311 1311 b'bzip2': b'HG10BZ',
1312 1312 b'gzip': b'HG10GZ',
1313 1313 b'bundle2': b'HG20',
1314 1314 }
1315 1315 bundletype = btypes.get(bundletype)
1316 1316 if bundletype not in bundle2.bundletypes:
1317 1317 raise error.Abort(_(b'unknown bundle type specified with --type'))
1318 1318 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1319 1319
1320 1320
1321 1321 @command(b'debugignore', [], b'[FILE]')
1322 1322 def debugignore(ui, repo, *files, **opts):
1323 1323 """display the combined ignore pattern and information about ignored files
1324 1324
1325 1325 With no argument display the combined ignore pattern.
1326 1326
1327 1327 Given space separated file names, shows if the given file is ignored and,
1328 1328 if so, shows the ignore rule (file and line number) that matched it.
1329 1329 """
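# Editor's note: illustrative invocations, not part of the upstream file; the
# path is a placeholder.
#
#   hg debugignore                  # dump the combined ignore pattern
#   hg debugignore build/output.o   # report whether and why this path is ignored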
1330 1330 ignore = repo.dirstate._ignore
1331 1331 if not files:
1332 1332 # Show all the patterns
1333 1333 ui.write(b"%s\n" % pycompat.byterepr(ignore))
1334 1334 else:
1335 1335 m = scmutil.match(repo[None], pats=files)
1336 1336 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1337 1337 for f in m.files():
1338 1338 nf = util.normpath(f)
1339 1339 ignored = None
1340 1340 ignoredata = None
1341 1341 if nf != b'.':
1342 1342 if ignore(nf):
1343 1343 ignored = nf
1344 1344 ignoredata = repo.dirstate._ignorefileandline(nf)
1345 1345 else:
1346 1346 for p in util.finddirs(nf):
1347 1347 if ignore(p):
1348 1348 ignored = p
1349 1349 ignoredata = repo.dirstate._ignorefileandline(p)
1350 1350 break
1351 1351 if ignored:
1352 1352 if ignored == nf:
1353 1353 ui.write(_(b"%s is ignored\n") % uipathfn(f))
1354 1354 else:
1355 1355 ui.write(
1356 1356 _(
1357 1357 b"%s is ignored because of "
1358 1358 b"containing directory %s\n"
1359 1359 )
1360 1360 % (uipathfn(f), ignored)
1361 1361 )
1362 1362 ignorefile, lineno, line = ignoredata
1363 1363 ui.write(
1364 1364 _(b"(ignore rule in %s, line %d: '%s')\n")
1365 1365 % (ignorefile, lineno, line)
1366 1366 )
1367 1367 else:
1368 1368 ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1369 1369
1370 1370
1371 1371 @command(
1372 1372 b'debugindex',
1373 1373 cmdutil.debugrevlogopts + cmdutil.formatteropts,
1374 1374 _(b'-c|-m|FILE'),
1375 1375 )
1376 1376 def debugindex(ui, repo, file_=None, **opts):
1377 1377 """dump index data for a storage primitive"""
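# Editor's note: hypothetical examples, not part of the upstream file.
#
#   hg debugindex -c   # changelog index: rev, linkrev, nodeid, p1, p2
#   hg debugindex -m   # the same columns for the manifest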
1378 1378 opts = pycompat.byteskwargs(opts)
1379 1379 store = cmdutil.openstorage(repo, b'debugindex', file_, opts)
1380 1380
1381 1381 if ui.debugflag:
1382 1382 shortfn = hex
1383 1383 else:
1384 1384 shortfn = short
1385 1385
1386 1386 idlen = 12
1387 1387 for i in store:
1388 1388 idlen = len(shortfn(store.node(i)))
1389 1389 break
1390 1390
1391 1391 fm = ui.formatter(b'debugindex', opts)
1392 1392 fm.plain(
1393 1393 b' rev linkrev %s %s p2\n'
1394 1394 % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
1395 1395 )
1396 1396
1397 1397 for rev in store:
1398 1398 node = store.node(rev)
1399 1399 parents = store.parents(node)
1400 1400
1401 1401 fm.startitem()
1402 1402 fm.write(b'rev', b'%6d ', rev)
1403 1403 fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
1404 1404 fm.write(b'node', b'%s ', shortfn(node))
1405 1405 fm.write(b'p1', b'%s ', shortfn(parents[0]))
1406 1406 fm.write(b'p2', b'%s', shortfn(parents[1]))
1407 1407 fm.plain(b'\n')
1408 1408
1409 1409 fm.end()
1410 1410
1411 1411
1412 1412 @command(
1413 1413 b'debugindexdot',
1414 1414 cmdutil.debugrevlogopts,
1415 1415 _(b'-c|-m|FILE'),
1416 1416 optionalrepo=True,
1417 1417 )
1418 1418 def debugindexdot(ui, repo, file_=None, **opts):
1419 1419 """dump an index DAG as a graphviz dot file"""
1420 1420 opts = pycompat.byteskwargs(opts)
1421 1421 r = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
1422 1422 ui.writenoi18n(b"digraph G {\n")
1423 1423 for i in r:
1424 1424 node = r.node(i)
1425 1425 pp = r.parents(node)
1426 1426 ui.write(b"\t%d -> %d\n" % (r.rev(pp[0]), i))
1427 1427 if pp[1] != nullid:
1428 1428 ui.write(b"\t%d -> %d\n" % (r.rev(pp[1]), i))
1429 1429 ui.write(b"}\n")
1430 1430
1431 1431
1432 1432 @command(b'debugindexstats', [])
1433 1433 def debugindexstats(ui, repo):
1434 1434 """show stats related to the changelog index"""
1435 1435 repo.changelog.shortest(nullid, 1)
1436 1436 index = repo.changelog.index
1437 1437 if not util.safehasattr(index, b'stats'):
1438 1438 raise error.Abort(_(b'debugindexstats only works with native code'))
1439 1439 for k, v in sorted(index.stats().items()):
1440 1440 ui.write(b'%s: %d\n' % (k, v))
1441 1441
1442 1442
1443 1443 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
1444 1444 def debuginstall(ui, **opts):
1445 1445 '''test Mercurial installation
1446 1446
1447 1447 Returns 0 on success.
1448 1448 '''
1449 1449 opts = pycompat.byteskwargs(opts)
1450 1450
1451 1451 problems = 0
1452 1452
1453 1453 fm = ui.formatter(b'debuginstall', opts)
1454 1454 fm.startitem()
1455 1455
1456 1456 # encoding
1457 1457 fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
1458 1458 err = None
1459 1459 try:
1460 1460 codecs.lookup(pycompat.sysstr(encoding.encoding))
1461 1461 except LookupError as inst:
1462 1462 err = stringutil.forcebytestr(inst)
1463 1463 problems += 1
1464 1464 fm.condwrite(
1465 1465 err,
1466 1466 b'encodingerror',
1467 1467 _(b" %s\n (check that your locale is properly set)\n"),
1468 1468 err,
1469 1469 )
1470 1470
1471 1471 # Python
1472 1472 fm.write(
1473 1473 b'pythonexe',
1474 1474 _(b"checking Python executable (%s)\n"),
1475 1475 pycompat.sysexecutable or _(b"unknown"),
1476 1476 )
1477 1477 fm.write(
1478 1478 b'pythonver',
1479 1479 _(b"checking Python version (%s)\n"),
1480 1480 (b"%d.%d.%d" % sys.version_info[:3]),
1481 1481 )
1482 1482 fm.write(
1483 1483 b'pythonlib',
1484 1484 _(b"checking Python lib (%s)...\n"),
1485 1485 os.path.dirname(pycompat.fsencode(os.__file__)),
1486 1486 )
1487 1487
1488 1488 security = set(sslutil.supportedprotocols)
1489 1489 if sslutil.hassni:
1490 1490 security.add(b'sni')
1491 1491
1492 1492 fm.write(
1493 1493 b'pythonsecurity',
1494 1494 _(b"checking Python security support (%s)\n"),
1495 1495 fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
1496 1496 )
1497 1497
1498 1498 # These are warnings, not errors. So don't increment problem count. This
1499 1499 # may change in the future.
1500 1500 if b'tls1.2' not in security:
1501 1501 fm.plain(
1502 1502 _(
1503 1503 b' TLS 1.2 not supported by Python install; '
1504 1504 b'network connections lack modern security\n'
1505 1505 )
1506 1506 )
1507 1507 if b'sni' not in security:
1508 1508 fm.plain(
1509 1509 _(
1510 1510 b' SNI not supported by Python install; may have '
1511 1511 b'connectivity issues with some servers\n'
1512 1512 )
1513 1513 )
1514 1514
1515 1515 # TODO print CA cert info
1516 1516
1517 1517 # hg version
1518 1518 hgver = util.version()
1519 1519 fm.write(
1520 1520 b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
1521 1521 )
1522 1522 fm.write(
1523 1523 b'hgverextra',
1524 1524 _(b"checking Mercurial custom build (%s)\n"),
1525 1525 b'+'.join(hgver.split(b'+')[1:]),
1526 1526 )
1527 1527
1528 1528 # compiled modules
1529 1529 fm.write(
1530 1530 b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
1531 1531 )
1532 1532 fm.write(
1533 1533 b'hgmodules',
1534 1534 _(b"checking installed modules (%s)...\n"),
1535 1535 os.path.dirname(pycompat.fsencode(__file__)),
1536 1536 )
1537 1537
1538 1538 rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
1539 1539 rustext = rustandc # for now, that's the only case
1540 1540 cext = policy.policy in (b'c', b'allow') or rustandc
1541 1541 nopure = cext or rustext
1542 1542 if nopure:
1543 1543 err = None
1544 1544 try:
1545 1545 if cext:
1546 1546 from .cext import (
1547 1547 base85,
1548 1548 bdiff,
1549 1549 mpatch,
1550 1550 osutil,
1551 1551 )
1552 1552
1553 1553 # quiet pyflakes
1554 1554 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1555 1555 if rustext:
1556 1556 from .rustext import (
1557 1557 ancestor,
1558 1558 dirstate,
1559 1559 )
1560 1560
1561 1561 dir(ancestor), dir(dirstate) # quiet pyflakes
1562 1562 except Exception as inst:
1563 1563 err = stringutil.forcebytestr(inst)
1564 1564 problems += 1
1565 1565 fm.condwrite(err, b'extensionserror', b" %s\n", err)
1566 1566
1567 1567 compengines = util.compengines._engines.values()
1568 1568 fm.write(
1569 1569 b'compengines',
1570 1570 _(b'checking registered compression engines (%s)\n'),
1571 1571 fm.formatlist(
1572 1572 sorted(e.name() for e in compengines),
1573 1573 name=b'compengine',
1574 1574 fmt=b'%s',
1575 1575 sep=b', ',
1576 1576 ),
1577 1577 )
1578 1578 fm.write(
1579 1579 b'compenginesavail',
1580 1580 _(b'checking available compression engines (%s)\n'),
1581 1581 fm.formatlist(
1582 1582 sorted(e.name() for e in compengines if e.available()),
1583 1583 name=b'compengine',
1584 1584 fmt=b'%s',
1585 1585 sep=b', ',
1586 1586 ),
1587 1587 )
1588 1588 wirecompengines = compression.compengines.supportedwireengines(
1589 1589 compression.SERVERROLE
1590 1590 )
1591 1591 fm.write(
1592 1592 b'compenginesserver',
1593 1593 _(
1594 1594 b'checking available compression engines '
1595 1595 b'for wire protocol (%s)\n'
1596 1596 ),
1597 1597 fm.formatlist(
1598 1598 [e.name() for e in wirecompengines if e.wireprotosupport()],
1599 1599 name=b'compengine',
1600 1600 fmt=b'%s',
1601 1601 sep=b', ',
1602 1602 ),
1603 1603 )
1604 1604 re2 = b'missing'
1605 1605 if util._re2:
1606 1606 re2 = b'available'
1607 1607 fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
1608 1608 fm.data(re2=bool(util._re2))
1609 1609
1610 1610 # templates
1611 1611 p = templater.templatepaths()
1612 1612 fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
1613 1613 fm.condwrite(not p, b'', _(b" no template directories found\n"))
1614 1614 if p:
1615 1615 m = templater.templatepath(b"map-cmdline.default")
1616 1616 if m:
1617 1617 # template found, check if it is working
1618 1618 err = None
1619 1619 try:
1620 1620 templater.templater.frommapfile(m)
1621 1621 except Exception as inst:
1622 1622 err = stringutil.forcebytestr(inst)
1623 1623 p = None
1624 1624 fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
1625 1625 else:
1626 1626 p = None
1627 1627 fm.condwrite(
1628 1628 p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
1629 1629 )
1630 1630 fm.condwrite(
1631 1631 not m,
1632 1632 b'defaulttemplatenotfound',
1633 1633 _(b" template '%s' not found\n"),
1634 1634 b"default",
1635 1635 )
1636 1636 if not p:
1637 1637 problems += 1
1638 1638 fm.condwrite(
1639 1639 not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
1640 1640 )
1641 1641
1642 1642 # editor
1643 1643 editor = ui.geteditor()
1644 1644 editor = util.expandpath(editor)
1645 1645 editorbin = procutil.shellsplit(editor)[0]
1646 1646 fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
1647 1647 cmdpath = procutil.findexe(editorbin)
1648 1648 fm.condwrite(
1649 1649 not cmdpath and editor == b'vi',
1650 1650 b'vinotfound',
1651 1651 _(
1652 1652 b" No commit editor set and can't find %s in PATH\n"
1653 1653 b" (specify a commit editor in your configuration"
1654 1654 b" file)\n"
1655 1655 ),
1656 1656 not cmdpath and editor == b'vi' and editorbin,
1657 1657 )
1658 1658 fm.condwrite(
1659 1659 not cmdpath and editor != b'vi',
1660 1660 b'editornotfound',
1661 1661 _(
1662 1662 b" Can't find editor '%s' in PATH\n"
1663 1663 b" (specify a commit editor in your configuration"
1664 1664 b" file)\n"
1665 1665 ),
1666 1666 not cmdpath and editorbin,
1667 1667 )
1668 1668 if not cmdpath and editor != b'vi':
1669 1669 problems += 1
1670 1670
1671 1671 # check username
1672 1672 username = None
1673 1673 err = None
1674 1674 try:
1675 1675 username = ui.username()
1676 1676 except error.Abort as e:
1677 1677 err = stringutil.forcebytestr(e)
1678 1678 problems += 1
1679 1679
1680 1680 fm.condwrite(
1681 1681 username, b'username', _(b"checking username (%s)\n"), username
1682 1682 )
1683 1683 fm.condwrite(
1684 1684 err,
1685 1685 b'usernameerror',
1686 1686 _(
1687 1687 b"checking username...\n %s\n"
1688 1688 b" (specify a username in your configuration file)\n"
1689 1689 ),
1690 1690 err,
1691 1691 )
1692 1692
1693 1693 for name, mod in extensions.extensions():
1694 1694 handler = getattr(mod, 'debuginstall', None)
1695 1695 if handler is not None:
1696 1696 problems += handler(ui, fm)
1697 1697
1698 1698 fm.condwrite(not problems, b'', _(b"no problems detected\n"))
1699 1699 if not problems:
1700 1700 fm.data(problems=problems)
1701 1701 fm.condwrite(
1702 1702 problems,
1703 1703 b'problems',
1704 1704 _(b"%d problems detected, please check your install!\n"),
1705 1705 problems,
1706 1706 )
1707 1707 fm.end()
1708 1708
1709 1709 return problems
1710 1710
1711 1711
1712 1712 @command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
1713 1713 def debugknown(ui, repopath, *ids, **opts):
1714 1714 """test whether node ids are known to a repo
1715 1715
1716 1716 Every ID must be a full-length hex node id string. Returns a list of 0s
1717 1717 and 1s indicating unknown/known.
1718 1718 """
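# Editor's note: hypothetical example, not part of the upstream file; the URL
# and node ids are placeholders.
#
#   hg debugknown https://example.org/repo <40-hex-id> <40-hex-id>
#
# would print e.g. "10": the first id is known to the remote, the second is not.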
1719 1719 opts = pycompat.byteskwargs(opts)
1720 1720 repo = hg.peer(ui, opts, repopath)
1721 1721 if not repo.capable(b'known'):
1722 1722 raise error.Abort(b"known() not supported by target repository")
1723 1723 flags = repo.known([bin(s) for s in ids])
1724 1724 ui.write(b"%s\n" % (b"".join([f and b"1" or b"0" for f in flags])))
1725 1725
1726 1726
1727 1727 @command(b'debuglabelcomplete', [], _(b'LABEL...'))
1728 1728 def debuglabelcomplete(ui, repo, *args):
1729 1729 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1730 1730 debugnamecomplete(ui, repo, *args)
1731 1731
1732 1732
1733 1733 @command(
1734 1734 b'debuglocks',
1735 1735 [
1736 1736 (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
1737 1737 (
1738 1738 b'W',
1739 1739 b'force-wlock',
1740 1740 None,
1741 1741 _(b'free the working state lock (DANGEROUS)'),
1742 1742 ),
1743 1743 (b's', b'set-lock', None, _(b'set the store lock until stopped')),
1744 1744 (
1745 1745 b'S',
1746 1746 b'set-wlock',
1747 1747 None,
1748 1748 _(b'set the working state lock until stopped'),
1749 1749 ),
1750 1750 ],
1751 1751 _(b'[OPTION]...'),
1752 1752 )
1753 1753 def debuglocks(ui, repo, **opts):
1754 1754 """show or modify state of locks
1755 1755
1756 1756 By default, this command will show which locks are held. This
1757 1757 includes the user and process holding the lock, the amount of time
1758 1758 the lock has been held, and the machine name where the process is
1759 1759 running if it's not local.
1760 1760
1761 1761 Locks protect the integrity of Mercurial's data, so they should be
1762 1762 treated with care. System crashes or other interruptions may cause
1763 1763 locks to not be properly released, though Mercurial will usually
1764 1764 detect and remove such stale locks automatically.
1765 1765
1766 1766 However, detecting stale locks may not always be possible (for
1767 1767 instance, on a shared filesystem). Removing locks may also be
1768 1768 blocked by filesystem permissions.
1769 1769
1770 1770 Setting a lock will prevent other commands from changing the data.
1771 1771 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1772 1772 The set locks are removed when the command exits.
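
As a sketch, the default report might look like this (the user, process,
host and timing values are illustrative)::

  lock:  user alice, process 24601, host build.example.com (61s)
  wlock: free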
1773 1773
1774 1774 Returns 0 if no locks are held.
1775 1775
1776 1776 """
1777 1777
1778 1778 if opts.get(r'force_lock'):
1779 1779 repo.svfs.unlink(b'lock')
1780 1780 if opts.get(r'force_wlock'):
1781 1781 repo.vfs.unlink(b'wlock')
1782 1782 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1783 1783 return 0
1784 1784
1785 1785 locks = []
1786 1786 try:
1787 1787 if opts.get(r'set_wlock'):
1788 1788 try:
1789 1789 locks.append(repo.wlock(False))
1790 1790 except error.LockHeld:
1791 1791 raise error.Abort(_(b'wlock is already held'))
1792 1792 if opts.get(r'set_lock'):
1793 1793 try:
1794 1794 locks.append(repo.lock(False))
1795 1795 except error.LockHeld:
1796 1796 raise error.Abort(_(b'lock is already held'))
1797 1797 if len(locks):
1798 1798 ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
1799 1799 return 0
1800 1800 finally:
1801 1801 release(*locks)
1802 1802
1803 1803 now = time.time()
1804 1804 held = 0
1805 1805
1806 1806 def report(vfs, name, method):
1807 1807 # this causes stale locks to get reaped for more accurate reporting
1808 1808 try:
1809 1809 l = method(False)
1810 1810 except error.LockHeld:
1811 1811 l = None
1812 1812
1813 1813 if l:
1814 1814 l.release()
1815 1815 else:
1816 1816 try:
1817 1817 st = vfs.lstat(name)
1818 1818 age = now - st[stat.ST_MTIME]
1819 1819 user = util.username(st.st_uid)
1820 1820 locker = vfs.readlock(name)
1821 1821 if b":" in locker:
1822 1822 host, pid = locker.split(b':')
1823 1823 if host == socket.gethostname():
1824 1824 locker = b'user %s, process %s' % (user or b'None', pid)
1825 1825 else:
1826 1826 locker = b'user %s, process %s, host %s' % (
1827 1827 user or b'None',
1828 1828 pid,
1829 1829 host,
1830 1830 )
1831 1831 ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
1832 1832 return 1
1833 1833 except OSError as e:
1834 1834 if e.errno != errno.ENOENT:
1835 1835 raise
1836 1836
1837 1837 ui.writenoi18n(b"%-6s free\n" % (name + b":"))
1838 1838 return 0
1839 1839
1840 1840 held += report(repo.svfs, b"lock", repo.lock)
1841 1841 held += report(repo.vfs, b"wlock", repo.wlock)
1842 1842
1843 1843 return held
1844 1844
1845 1845
1846 1846 @command(
1847 1847 b'debugmanifestfulltextcache',
1848 1848 [
1849 1849 (b'', b'clear', False, _(b'clear the cache')),
1850 1850 (
1851 1851 b'a',
1852 1852 b'add',
1853 1853 [],
1854 1854 _(b'add the given manifest nodes to the cache'),
1855 1855 _(b'NODE'),
1856 1856 ),
1857 1857 ],
1858 1858 b'',
1859 1859 )
1860 1860 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1861 1861 """show, clear or amend the contents of the manifest fulltext cache"""
1862 1862
1863 1863 def getcache():
1864 1864 r = repo.manifestlog.getstorage(b'')
1865 1865 try:
1866 1866 return r._fulltextcache
1867 1867 except AttributeError:
1868 1868 msg = _(
1869 1869 b"Current revlog implementation doesn't appear to have a "
1870 1870 b"manifest fulltext cache\n"
1871 1871 )
1872 1872 raise error.Abort(msg)
1873 1873
1874 1874 if opts.get(r'clear'):
1875 1875 with repo.wlock():
1876 1876 cache = getcache()
1877 1877 cache.clear(clear_persisted_data=True)
1878 1878 return
1879 1879
1880 1880 if add:
1881 1881 with repo.wlock():
1882 1882 m = repo.manifestlog
1883 1883 store = m.getstorage(b'')
1884 1884 for n in add:
1885 1885 try:
1886 1886 manifest = m[store.lookup(n)]
1887 1887 except error.LookupError as e:
1888 1888 raise error.Abort(e, hint=b"Check your manifest node id")
1889 1889 manifest.read() # stores revision in cache too
1890 1890 return
1891 1891
1892 1892 cache = getcache()
1893 1893 if not len(cache):
1894 1894 ui.write(_(b'cache empty\n'))
1895 1895 else:
1896 1896 ui.write(
1897 1897 _(
1898 1898 b'cache contains %d manifest entries, in order of most to '
1899 1899 b'least recent:\n'
1900 1900 )
1901 1901 % (len(cache),)
1902 1902 )
1903 1903 totalsize = 0
1904 1904 for nodeid in cache:
1905 1905 # Use cache.peek to not update the LRU order
1906 1906 data = cache.peek(nodeid)
1907 1907 size = len(data)
1908 1908 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1909 1909 ui.write(
1910 1910 _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
1911 1911 )
1912 1912 ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
1913 1913 ui.write(
1914 1914 _(b'total cache data size %s, on-disk %s\n')
1915 1915 % (util.bytecount(totalsize), util.bytecount(ondisk))
1916 1916 )
1917 1917
1918 1918
1919 1919 @command(b'debugmergestate', [], b'')
1920 1920 def debugmergestate(ui, repo, *args):
1921 1921 """print merge state
1922 1922
1923 1923 Use --verbose to print out information about whether v1 or v2 merge state
1924 1924 was chosen."""
1925 1925
1926 1926 def _hashornull(h):
1927 1927 if h == nullhex:
1928 1928 return b'null'
1929 1929 else:
1930 1930 return h
1931 1931
1932 1932 def printrecords(version):
1933 1933 ui.writenoi18n(b'* version %d records\n' % version)
1934 1934 if version == 1:
1935 1935 records = v1records
1936 1936 else:
1937 1937 records = v2records
1938 1938
1939 1939 for rtype, record in records:
1940 1940 # pretty print some record types
1941 1941 if rtype == b'L':
1942 1942 ui.writenoi18n(b'local: %s\n' % record)
1943 1943 elif rtype == b'O':
1944 1944 ui.writenoi18n(b'other: %s\n' % record)
1945 1945 elif rtype == b'm':
1946 1946 driver, mdstate = record.split(b'\0', 1)
1947 1947 ui.writenoi18n(
1948 1948 b'merge driver: %s (state "%s")\n' % (driver, mdstate)
1949 1949 )
1950 1950 elif rtype in b'FDC':
1951 1951 r = record.split(b'\0')
1952 1952 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1953 1953 if version == 1:
1954 1954 onode = b'not stored in v1 format'
1955 1955 flags = r[7]
1956 1956 else:
1957 1957 onode, flags = r[7:9]
1958 1958 ui.writenoi18n(
1959 1959 b'file: %s (record type "%s", state "%s", hash %s)\n'
1960 1960 % (f, rtype, state, _hashornull(hash))
1961 1961 )
1962 1962 ui.writenoi18n(
1963 1963 b' local path: %s (flags "%s")\n' % (lfile, flags)
1964 1964 )
1965 1965 ui.writenoi18n(
1966 1966 b' ancestor path: %s (node %s)\n'
1967 1967 % (afile, _hashornull(anode))
1968 1968 )
1969 1969 ui.writenoi18n(
1970 1970 b' other path: %s (node %s)\n'
1971 1971 % (ofile, _hashornull(onode))
1972 1972 )
1973 1973 elif rtype == b'f':
1974 1974 filename, rawextras = record.split(b'\0', 1)
1975 1975 extras = rawextras.split(b'\0')
1976 1976 i = 0
1977 1977 extrastrings = []
1978 1978 while i < len(extras):
1979 1979 extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
1980 1980 i += 2
1981 1981
1982 1982 ui.writenoi18n(
1983 1983 b'file extras: %s (%s)\n'
1984 1984 % (filename, b', '.join(extrastrings))
1985 1985 )
1986 1986 elif rtype == b'l':
1987 1987 labels = record.split(b'\0', 2)
1988 1988 labels = [l for l in labels if len(l) > 0]
1989 1989 ui.writenoi18n(b'labels:\n')
1990 1990 ui.write((b' local: %s\n' % labels[0]))
1991 1991 ui.write((b' other: %s\n' % labels[1]))
1992 1992 if len(labels) > 2:
1993 1993 ui.write((b' base: %s\n' % labels[2]))
1994 1994 else:
1995 1995 ui.writenoi18n(
1996 1996 b'unrecognized entry: %s\t%s\n'
1997 1997 % (rtype, record.replace(b'\0', b'\t'))
1998 1998 )
1999 1999
2000 2000 # Avoid mergestate.read() since it may raise an exception for unsupported
2001 2001 # merge state records. We shouldn't be doing this, but this is OK since this
2002 2002 # command is pretty low-level.
2003 2003 ms = mergemod.mergestate(repo)
2004 2004
2005 2005 # sort so that reasonable information is on top
2006 2006 v1records = ms._readrecordsv1()
2007 2007 v2records = ms._readrecordsv2()
2008 2008 order = b'LOml'
2009 2009
2010 2010 def key(r):
2011 2011 idx = order.find(r[0])
2012 2012 if idx == -1:
2013 2013 return (1, r[1])
2014 2014 else:
2015 2015 return (0, idx)
2016 2016
2017 2017 v1records.sort(key=key)
2018 2018 v2records.sort(key=key)
2019 2019
2020 2020 if not v1records and not v2records:
2021 2021 ui.writenoi18n(b'no merge state found\n')
2022 2022 elif not v2records:
2023 2023 ui.notenoi18n(b'no version 2 merge state\n')
2024 2024 printrecords(1)
2025 2025 elif ms._v1v2match(v1records, v2records):
2026 2026 ui.notenoi18n(b'v1 and v2 states match: using v2\n')
2027 2027 printrecords(2)
2028 2028 else:
2029 2029 ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
2030 2030 printrecords(1)
2031 2031 if ui.verbose:
2032 2032 printrecords(2)
2033 2033
2034 2034
2035 2035 @command(b'debugnamecomplete', [], _(b'NAME...'))
2036 2036 def debugnamecomplete(ui, repo, *args):
2037 2037 '''complete "names" - tags, open branch names, bookmark names'''
2038 2038
2039 2039 names = set()
2040 2040 # since we previously only listed open branches, we will handle that
2041 2041 # specially (after this for loop)
2042 2042 for name, ns in pycompat.iteritems(repo.names):
2043 2043 if name != b'branches':
2044 2044 names.update(ns.listnames(repo))
2045 2045 names.update(
2046 2046 tag
2047 2047 for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
2048 2048 if not closed
2049 2049 )
2050 2050 completions = set()
2051 2051 if not args:
2052 2052 args = [b'']
2053 2053 for a in args:
2054 2054 completions.update(n for n in names if n.startswith(a))
2055 2055 ui.write(b'\n'.join(sorted(completions)))
2056 2056 ui.write(b'\n')
2057 2057
2058 2058
2059 2059 @command(
2060 2060 b'debugobsolete',
2061 2061 [
2062 2062 (b'', b'flags', 0, _(b'markers flag')),
2063 2063 (
2064 2064 b'',
2065 2065 b'record-parents',
2066 2066 False,
2067 2067 _(b'record parent information for the precursor'),
2068 2068 ),
2069 2069 (b'r', b'rev', [], _(b'display markers relevant to REV')),
2070 2070 (
2071 2071 b'',
2072 2072 b'exclusive',
2073 2073 False,
2074 2074 _(b'restrict display to markers only relevant to REV'),
2075 2075 ),
2076 2076 (b'', b'index', False, _(b'display index of the marker')),
2077 2077 (b'', b'delete', [], _(b'delete markers specified by indices')),
2078 2078 ]
2079 2079 + cmdutil.commitopts2
2080 2080 + cmdutil.formatteropts,
2081 2081 _(b'[OBSOLETED [REPLACEMENT ...]]'),
2082 2082 )
2083 2083 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2084 2084 """create arbitrary obsolete marker
2085 2085
2086 2086 With no arguments, displays the list of obsolescence markers."""
2087 2087
2088 2088 opts = pycompat.byteskwargs(opts)
2089 2089
2090 2090 def parsenodeid(s):
2091 2091 try:
2092 2092 # We do not use revsingle/revrange functions here to accept
2093 2093 # arbitrary node identifiers, possibly not present in the
2094 2094 # local repository.
2095 2095 n = bin(s)
2096 2096 if len(n) != len(nullid):
2097 2097 raise TypeError()
2098 2098 return n
2099 2099 except TypeError:
2100 2100 raise error.Abort(
2101 2101 b'changeset references must be full hexadecimal '
2102 2102 b'node identifiers'
2103 2103 )
2104 2104
2105 2105 if opts.get(b'delete'):
2106 2106 indices = []
2107 2107 for v in opts.get(b'delete'):
2108 2108 try:
2109 2109 indices.append(int(v))
2110 2110 except ValueError:
2111 2111 raise error.Abort(
2112 2112 _(b'invalid index value: %r') % v,
2113 2113 hint=_(b'use integers for indices'),
2114 2114 )
2115 2115
2116 2116 if repo.currenttransaction():
2117 2117 raise error.Abort(
2118 2118 _(b'cannot delete obsmarkers in the middle of a transaction.')
2119 2119 )
2120 2120
2121 2121 with repo.lock():
2122 2122 n = repair.deleteobsmarkers(repo.obsstore, indices)
2123 2123 ui.write(_(b'deleted %i obsolescence markers\n') % n)
2124 2124
2125 2125 return
2126 2126
2127 2127 if precursor is not None:
2128 2128 if opts[b'rev']:
2129 2129 raise error.Abort(b'cannot select revision when creating marker')
2130 2130 metadata = {}
2131 2131 metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
2132 2132 succs = tuple(parsenodeid(succ) for succ in successors)
2133 2133 l = repo.lock()
2134 2134 try:
2135 2135 tr = repo.transaction(b'debugobsolete')
2136 2136 try:
2137 2137 date = opts.get(b'date')
2138 2138 if date:
2139 2139 date = dateutil.parsedate(date)
2140 2140 else:
2141 2141 date = None
2142 2142 prec = parsenodeid(precursor)
2143 2143 parents = None
2144 2144 if opts[b'record_parents']:
2145 2145 if prec not in repo.unfiltered():
2146 2146 raise error.Abort(
2147 2147 b'cannot use --record-parents on '
2148 2148 b'unknown changesets'
2149 2149 )
2150 2150 parents = repo.unfiltered()[prec].parents()
2151 2151 parents = tuple(p.node() for p in parents)
2152 2152 repo.obsstore.create(
2153 2153 tr,
2154 2154 prec,
2155 2155 succs,
2156 2156 opts[b'flags'],
2157 2157 parents=parents,
2158 2158 date=date,
2159 2159 metadata=metadata,
2160 2160 ui=ui,
2161 2161 )
2162 2162 tr.close()
2163 2163 except ValueError as exc:
2164 2164 raise error.Abort(
2165 2165 _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
2166 2166 )
2167 2167 finally:
2168 2168 tr.release()
2169 2169 finally:
2170 2170 l.release()
2171 2171 else:
2172 2172 if opts[b'rev']:
2173 2173 revs = scmutil.revrange(repo, opts[b'rev'])
2174 2174 nodes = [repo[r].node() for r in revs]
2175 2175 markers = list(
2176 2176 obsutil.getmarkers(
2177 2177 repo, nodes=nodes, exclusive=opts[b'exclusive']
2178 2178 )
2179 2179 )
2180 2180 markers.sort(key=lambda x: x._data)
2181 2181 else:
2182 2182 markers = obsutil.getmarkers(repo)
2183 2183
2184 2184 markerstoiter = markers
2185 2185 isrelevant = lambda m: True
2186 2186 if opts.get(b'rev') and opts.get(b'index'):
2187 2187 markerstoiter = obsutil.getmarkers(repo)
2188 2188 markerset = set(markers)
2189 2189 isrelevant = lambda m: m in markerset
2190 2190
2191 2191 fm = ui.formatter(b'debugobsolete', opts)
2192 2192 for i, m in enumerate(markerstoiter):
2193 2193 if not isrelevant(m):
2194 2194 # marker can be irrelevant when we're iterating over a set
2195 2195 # of markers (markerstoiter) which is bigger than the set
2196 2196 # of markers we want to display (markers).
2197 2197 # This can happen if both the --index and --rev options are
2198 2198 # provided, and thus we need to iterate over all of the markers
2199 2199 # to get the correct indices, but only display the ones that
2200 2200 # are relevant to the --rev value.
2201 2201 continue
2202 2202 fm.startitem()
2203 2203 ind = i if opts.get(b'index') else None
2204 2204 cmdutil.showmarker(fm, m, index=ind)
2205 2205 fm.end()
2206 2206
2207 2207
2208 2208 @command(
2209 2209 b'debugp1copies',
2210 2210 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2211 2211 _(b'[-r REV]'),
2212 2212 )
2213 2213 def debugp1copies(ui, repo, **opts):
2214 2214 """dump copy information compared to p1"""
2215 2215
2216 2216 opts = pycompat.byteskwargs(opts)
2217 2217 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2218 2218 for dst, src in ctx.p1copies().items():
2219 2219 ui.write(b'%s -> %s\n' % (src, dst))
2220 2220
2221 2221
2222 2222 @command(
2223 2223 b'debugp2copies',
2224 2224 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2225 2225 _(b'[-r REV]'),
2226 2226 )
2227 2227 def debugp2copies(ui, repo, **opts):
2228 2228 """dump copy information compared to p2"""
2229 2229
2230 2230 opts = pycompat.byteskwargs(opts)
2231 2231 ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
2232 2232 for dst, src in ctx.p2copies().items():
2233 2233 ui.write(b'%s -> %s\n' % (src, dst))
2234 2234
2235 2235
2236 2236 @command(
2237 2237 b'debugpathcomplete',
2238 2238 [
2239 2239 (b'f', b'full', None, _(b'complete an entire path')),
2240 2240 (b'n', b'normal', None, _(b'show only normal files')),
2241 2241 (b'a', b'added', None, _(b'show only added files')),
2242 2242 (b'r', b'removed', None, _(b'show only removed files')),
2243 2243 ],
2244 2244 _(b'FILESPEC...'),
2245 2245 )
2246 2246 def debugpathcomplete(ui, repo, *specs, **opts):
2247 2247 '''complete part or all of a tracked path
2248 2248
2249 2249 This command supports shells that offer path name completion. It
2250 2250 currently completes only files already known to the dirstate.
2251 2251
2252 2252 Completion extends only to the next path segment unless
2253 2253 --full is specified, in which case entire paths are used.'''
2254 2254
2255 2255 def complete(path, acceptable):
2256 2256 dirstate = repo.dirstate
2257 2257 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
2258 2258 rootdir = repo.root + pycompat.ossep
2259 2259 if spec != repo.root and not spec.startswith(rootdir):
2260 2260 return [], []
2261 2261 if os.path.isdir(spec):
2262 2262 spec += b'/'
2263 2263 spec = spec[len(rootdir) :]
2264 2264 fixpaths = pycompat.ossep != b'/'
2265 2265 if fixpaths:
2266 2266 spec = spec.replace(pycompat.ossep, b'/')
2267 2267 speclen = len(spec)
2268 2268 fullpaths = opts[r'full']
2269 2269 files, dirs = set(), set()
2270 2270 adddir, addfile = dirs.add, files.add
2271 2271 for f, st in pycompat.iteritems(dirstate):
2272 2272 if f.startswith(spec) and st[0] in acceptable:
2273 2273 if fixpaths:
2274 2274 f = f.replace(b'/', pycompat.ossep)
2275 2275 if fullpaths:
2276 2276 addfile(f)
2277 2277 continue
2278 2278 s = f.find(pycompat.ossep, speclen)
2279 2279 if s >= 0:
2280 2280 adddir(f[:s])
2281 2281 else:
2282 2282 addfile(f)
2283 2283 return files, dirs
2284 2284
2285 2285 acceptable = b''
2286 2286 if opts[r'normal']:
2287 2287 acceptable += b'nm'
2288 2288 if opts[r'added']:
2289 2289 acceptable += b'a'
2290 2290 if opts[r'removed']:
2291 2291 acceptable += b'r'
2292 2292 cwd = repo.getcwd()
2293 2293 if not specs:
2294 2294 specs = [b'.']
2295 2295
2296 2296 files, dirs = set(), set()
2297 2297 for spec in specs:
2298 2298 f, d = complete(spec, acceptable or b'nmar')
2299 2299 files.update(f)
2300 2300 dirs.update(d)
2301 2301 files.update(dirs)
2302 2302 ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2303 2303 ui.write(b'\n')
2304 2304
2305 2305
2306 2306 @command(
2307 2307 b'debugpathcopies',
2308 2308 cmdutil.walkopts,
2309 2309 b'hg debugpathcopies REV1 REV2 [FILE]',
2310 2310 inferrepo=True,
2311 2311 )
2312 2312 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
2313 2313 """show copies between two revisions"""
2314 2314 ctx1 = scmutil.revsingle(repo, rev1)
2315 2315 ctx2 = scmutil.revsingle(repo, rev2)
2316 2316 m = scmutil.match(ctx1, pats, opts)
2317 2317 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
2318 2318 ui.write(b'%s -> %s\n' % (src, dst))
2319 2319
2320 2320
2321 2321 @command(b'debugpeer', [], _(b'PATH'), norepo=True)
2322 2322 def debugpeer(ui, path):
2323 2323 """establish a connection to a peer repository"""
2324 2324 # Always enable peer request logging. Requires --debug to display
2325 2325 # though.
2326 2326 overrides = {
2327 2327 (b'devel', b'debug.peer-request'): True,
2328 2328 }
2329 2329
2330 2330 with ui.configoverride(overrides):
2331 2331 peer = hg.peer(ui, {}, path)
2332 2332
2333 2333 local = peer.local() is not None
2334 2334 canpush = peer.canpush()
2335 2335
2336 2336 ui.write(_(b'url: %s\n') % peer.url())
2337 2337 ui.write(_(b'local: %s\n') % (_(b'yes') if local else _(b'no')))
2338 2338 ui.write(_(b'pushable: %s\n') % (_(b'yes') if canpush else _(b'no')))
2339 2339
2340 2340
2341 2341 @command(
2342 2342 b'debugpickmergetool',
2343 2343 [
2344 2344 (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
2345 2345 (b'', b'changedelete', None, _(b'emulate merging change and delete')),
2346 2346 ]
2347 2347 + cmdutil.walkopts
2348 2348 + cmdutil.mergetoolopts,
2349 2349 _(b'[PATTERN]...'),
2350 2350 inferrepo=True,
2351 2351 )
2352 2352 def debugpickmergetool(ui, repo, *pats, **opts):
2353 2353 """examine which merge tool is chosen for specified file
2354 2354
2355 2355 As described in :hg:`help merge-tools`, Mercurial examines the
2356 2356 configurations below in this order to decide which merge tool is
2357 2357 chosen for the specified file.
2358 2358
2359 2359 1. ``--tool`` option
2360 2360 2. ``HGMERGE`` environment variable
2361 2361 3. configurations in ``merge-patterns`` section
2362 2362 4. configuration of ``ui.merge``
2363 2363 5. configurations in ``merge-tools`` section
2364 2364 6. ``hgmerge`` tool (for historical reasons only)
2365 2365 7. default tool for fallback (``:merge`` or ``:prompt``)
2366 2366
2367 2367 This command writes out the examination result in the style below::
2368 2368
2369 2369 FILE = MERGETOOL
2370 2370
2371 2371 By default, all files known in the first parent context of the
2372 2372 working directory are examined. Use file patterns and/or -I/-X
2373 2373 options to limit target files. -r/--rev is also useful to examine
2374 2374 files in another context without actually updating to it.
2375 2375
2376 2376 With --debug, this command also shows the warning messages emitted
2377 2377 while matching against ``merge-patterns`` and so on. It is recommended
2378 2378 to use this option with explicit file patterns and/or -I/-X options,
2379 2379 because this option increases the amount of output per file according
2380 2380 to the configurations in hgrc.
2381 2381
2382 2382 With -v/--verbose, this command first shows the configurations below
2383 2383 (only if they are specified).
2384 2384
2385 2385 - ``--tool`` option
2386 2386 - ``HGMERGE`` environment variable
2387 2387 - configuration of ``ui.merge``
2388 2388
2389 2389 If a merge tool is chosen before matching against
2390 2390 ``merge-patterns``, this command can't show any helpful
2391 2391 information, even with --debug. In such a case, the information
2392 2392 above is useful for understanding why a merge tool was chosen.
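
A minimal illustrative run (the file name is hypothetical; ``:merge3`` is
one of Mercurial's internal merge tools)::

  $ hg debugpickmergetool --tool :merge3 foo.c
  foo.c = :merge3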
2393 2393 """
2394 2394 opts = pycompat.byteskwargs(opts)
2395 2395 overrides = {}
2396 2396 if opts[b'tool']:
2397 2397 overrides[(b'ui', b'forcemerge')] = opts[b'tool']
2398 2398 ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))
2399 2399
2400 2400 with ui.configoverride(overrides, b'debugmergepatterns'):
2401 2401 hgmerge = encoding.environ.get(b"HGMERGE")
2402 2402 if hgmerge is not None:
2403 2403 ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
2404 2404 uimerge = ui.config(b"ui", b"merge")
2405 2405 if uimerge:
2406 2406 ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
2407 2407
2408 2408 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2409 2409 m = scmutil.match(ctx, pats, opts)
2410 2410 changedelete = opts[b'changedelete']
2411 2411 for path in ctx.walk(m):
2412 2412 fctx = ctx[path]
2413 2413 try:
2414 2414 if not ui.debugflag:
2415 2415 ui.pushbuffer(error=True)
2416 2416 tool, toolpath = filemerge._picktool(
2417 2417 repo,
2418 2418 ui,
2419 2419 path,
2420 2420 fctx.isbinary(),
2421 2421 b'l' in fctx.flags(),
2422 2422 changedelete,
2423 2423 )
2424 2424 finally:
2425 2425 if not ui.debugflag:
2426 2426 ui.popbuffer()
2427 2427 ui.write(b'%s = %s\n' % (path, tool))
2428 2428
2429 2429
2430 2430 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2431 2431 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2432 2432 '''access the pushkey key/value protocol
2433 2433
2434 2434 With two args, list the keys in the given namespace.
2435 2435
2436 2436 With five args, set a key to new if it currently is set to old.
2437 2437 Reports success or failure.
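
For example, listing the bookmarks namespace of a hypothetical remote
(the bookmark name and node id are illustrative)::

  $ hg debugpushkey http://example.com/repo bookmarks
  feature-x    1234567890abcdef1234567890abcdef12345678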
2438 2438 '''
2439 2439
2440 2440 target = hg.peer(ui, {}, repopath)
2441 2441 if keyinfo:
2442 2442 key, old, new = keyinfo
2443 2443 with target.commandexecutor() as e:
2444 2444 r = e.callcommand(
2445 2445 b'pushkey',
2446 2446 {
2447 2447 b'namespace': namespace,
2448 2448 b'key': key,
2449 2449 b'old': old,
2450 2450 b'new': new,
2451 2451 },
2452 2452 ).result()
2453 2453
2454 2454 ui.status(pycompat.bytestr(r) + b'\n')
2455 2455 return not r
2456 2456 else:
2457 2457 for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
2458 2458 ui.write(
2459 2459 b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
2460 2460 )
2461 2461
2462 2462
2463 2463 @command(b'debugpvec', [], _(b'A B'))
2464 2464 def debugpvec(ui, repo, a, b=None):
2465 2465 ca = scmutil.revsingle(repo, a)
2466 2466 cb = scmutil.revsingle(repo, b)
2467 2467 pa = pvec.ctxpvec(ca)
2468 2468 pb = pvec.ctxpvec(cb)
2469 2469 if pa == pb:
2470 2470 rel = b"="
2471 2471 elif pa > pb:
2472 2472 rel = b">"
2473 2473 elif pa < pb:
2474 2474 rel = b"<"
2475 2475 elif pa | pb:
2476 2476 rel = b"|"
2477 2477 ui.write(_(b"a: %s\n") % pa)
2478 2478 ui.write(_(b"b: %s\n") % pb)
2479 2479 ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2480 2480 ui.write(
2481 2481 _(b"delta: %d hdist: %d distance: %d relation: %s\n")
2482 2482 % (
2483 2483 abs(pa._depth - pb._depth),
2484 2484 pvec._hamming(pa._vec, pb._vec),
2485 2485 pa.distance(pb),
2486 2486 rel,
2487 2487 )
2488 2488 )
2489 2489
2490 2490
2491 2491 @command(
2492 2492 b'debugrebuilddirstate|debugrebuildstate',
2493 2493 [
2494 2494 (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
2495 2495 (
2496 2496 b'',
2497 2497 b'minimal',
2498 2498 None,
2499 2499 _(
2500 2500 b'only rebuild files that are inconsistent with '
2501 2501 b'the working copy parent'
2502 2502 ),
2503 2503 ),
2504 2504 ],
2505 2505 _(b'[-r REV]'),
2506 2506 )
2507 2507 def debugrebuilddirstate(ui, repo, rev, **opts):
2508 2508 """rebuild the dirstate as it would look like for the given revision
2509 2509
2510 2510 If no revision is specified, the first current parent will be used.
2511 2511
2512 2512 The dirstate will be set to the files of the given revision.
2513 2513 The actual working directory content or existing dirstate
2514 2514 information such as adds or removes is not considered.
2515 2515
2516 2516 ``minimal`` will only rebuild the dirstate status for files that claim to be
2517 2517 tracked but are not in the parent manifest, or that exist in the parent
2518 2518 manifest but are not in the dirstate. It will not change adds, removes, or
2519 2519 modified files that are in the working copy parent.
2520 2520
2521 2521 One use of this command is to make the next :hg:`status` invocation
2522 2522 check the actual file content.
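
For example, to rebuild only the entries that are inconsistent with the
working copy parent (a sketch; nothing is printed on success)::

  $ hg debugrebuilddirstate --minimal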
2523 2523 """
2524 2524 ctx = scmutil.revsingle(repo, rev)
2525 2525 with repo.wlock():
2526 2526 dirstate = repo.dirstate
2527 2527 changedfiles = None
2528 2528 # See command doc for what minimal does.
2529 2529 if opts.get(r'minimal'):
2530 2530 manifestfiles = set(ctx.manifest().keys())
2531 2531 dirstatefiles = set(dirstate)
2532 2532 manifestonly = manifestfiles - dirstatefiles
2533 2533 dsonly = dirstatefiles - manifestfiles
2534 2534 dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
2535 2535 changedfiles = manifestonly | dsnotadded
2536 2536
2537 2537 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2538 2538
2539 2539
2540 2540 @command(b'debugrebuildfncache', [], b'')
2541 2541 def debugrebuildfncache(ui, repo):
2542 2542 """rebuild the fncache file"""
2543 2543 repair.rebuildfncache(ui, repo)
2544 2544
2545 2545
2546 2546 @command(
2547 2547 b'debugrename',
2548 2548 [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
2549 2549 _(b'[-r REV] [FILE]...'),
2550 2550 )
2551 2551 def debugrename(ui, repo, *pats, **opts):
2552 2552 """dump rename information"""
2553 2553
2554 2554 opts = pycompat.byteskwargs(opts)
2555 2555 ctx = scmutil.revsingle(repo, opts.get(b'rev'))
2556 2556 m = scmutil.match(ctx, pats, opts)
2557 2557 for abs in ctx.walk(m):
2558 2558 fctx = ctx[abs]
2559 2559 o = fctx.filelog().renamed(fctx.filenode())
2560 2560 rel = repo.pathto(abs)
2561 2561 if o:
2562 2562 ui.write(_(b"%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2563 2563 else:
2564 2564 ui.write(_(b"%s not renamed\n") % rel)
2565 2565
2566 2566
2567 2567 @command(
2568 2568 b'debugrevlog',
2569 2569 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2570 2570 _(b'-c|-m|FILE'),
2571 2571 optionalrepo=True,
2572 2572 )
2573 2573 def debugrevlog(ui, repo, file_=None, **opts):
2574 2574 """show data and statistics about a revlog"""
2575 2575 opts = pycompat.byteskwargs(opts)
2576 2576 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2577 2577
2578 2578 if opts.get(b"dump"):
2579 2579 numrevs = len(r)
2580 2580 ui.write(
2581 2581 (
2582 2582 b"# rev p1rev p2rev start end deltastart base p1 p2"
2583 2583 b" rawsize totalsize compression heads chainlen\n"
2584 2584 )
2585 2585 )
2586 2586 ts = 0
2587 2587 heads = set()
2588 2588
2589 2589 for rev in pycompat.xrange(numrevs):
2590 2590 dbase = r.deltaparent(rev)
2591 2591 if dbase == -1:
2592 2592 dbase = rev
2593 2593 cbase = r.chainbase(rev)
2594 2594 clen = r.chainlen(rev)
2595 2595 p1, p2 = r.parentrevs(rev)
2596 2596 rs = r.rawsize(rev)
2597 2597 ts = ts + rs
2598 2598 heads -= set(r.parentrevs(rev))
2599 2599 heads.add(rev)
2600 2600 try:
2601 2601 compression = ts / r.end(rev)
2602 2602 except ZeroDivisionError:
2603 2603 compression = 0
2604 2604 ui.write(
2605 2605 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2606 2606 b"%11d %5d %8d\n"
2607 2607 % (
2608 2608 rev,
2609 2609 p1,
2610 2610 p2,
2611 2611 r.start(rev),
2612 2612 r.end(rev),
2613 2613 r.start(dbase),
2614 2614 r.start(cbase),
2615 2615 r.start(p1),
2616 2616 r.start(p2),
2617 2617 rs,
2618 2618 ts,
2619 2619 compression,
2620 2620 len(heads),
2621 2621 clen,
2622 2622 )
2623 2623 )
2624 2624 return 0
2625 2625
2626 2626 v = r.version
2627 2627 format = v & 0xFFFF
2628 2628 flags = []
2629 2629 gdelta = False
2630 2630 if v & revlog.FLAG_INLINE_DATA:
2631 2631 flags.append(b'inline')
2632 2632 if v & revlog.FLAG_GENERALDELTA:
2633 2633 gdelta = True
2634 2634 flags.append(b'generaldelta')
2635 2635 if not flags:
2636 2636 flags = [b'(none)']
2637 2637
2638 2638 ### tracks merge vs single parent
2639 2639 nummerges = 0
2640 2640
2641 2641 ### tracks the ways the deltas are built
2642 2642 # nodelta
2643 2643 numempty = 0
2644 2644 numemptytext = 0
2645 2645 numemptydelta = 0
2646 2646 # full file content
2647 2647 numfull = 0
2648 2648 # intermediate snapshot against a prior snapshot
2649 2649 numsemi = 0
2650 2650 # snapshot count per depth
2651 2651 numsnapdepth = collections.defaultdict(lambda: 0)
2652 2652 # delta against previous revision
2653 2653 numprev = 0
2654 2654 # delta against first or second parent (not prev)
2655 2655 nump1 = 0
2656 2656 nump2 = 0
2657 2657 # delta against neither prev nor parents
2658 2658 numother = 0
2659 2659 # delta against prev that are also first or second parent
2660 2660 # (details of `numprev`)
2661 2661 nump1prev = 0
2662 2662 nump2prev = 0
2663 2663
2664 2664 # data about delta chain of each revs
2665 2665 chainlengths = []
2666 2666 chainbases = []
2667 2667 chainspans = []
2668 2668
2669 2669 # data about each revision
2670 2670 datasize = [None, 0, 0]
2671 2671 fullsize = [None, 0, 0]
2672 2672 semisize = [None, 0, 0]
2673 2673 # snapshot count per depth
2674 2674 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2675 2675 deltasize = [None, 0, 0]
2676 2676 chunktypecounts = {}
2677 2677 chunktypesizes = {}
2678 2678
2679 2679 def addsize(size, l):
2680 2680 if l[0] is None or size < l[0]:
2681 2681 l[0] = size
2682 2682 if size > l[1]:
2683 2683 l[1] = size
2684 2684 l[2] += size
2685 2685
2686 2686 numrevs = len(r)
2687 2687 for rev in pycompat.xrange(numrevs):
2688 2688 p1, p2 = r.parentrevs(rev)
2689 2689 delta = r.deltaparent(rev)
2690 2690 if format > 0:
2691 2691 addsize(r.rawsize(rev), datasize)
2692 2692 if p2 != nullrev:
2693 2693 nummerges += 1
2694 2694 size = r.length(rev)
2695 2695 if delta == nullrev:
2696 2696 chainlengths.append(0)
2697 2697 chainbases.append(r.start(rev))
2698 2698 chainspans.append(size)
2699 2699 if size == 0:
2700 2700 numempty += 1
2701 2701 numemptytext += 1
2702 2702 else:
2703 2703 numfull += 1
2704 2704 numsnapdepth[0] += 1
2705 2705 addsize(size, fullsize)
2706 2706 addsize(size, snapsizedepth[0])
2707 2707 else:
2708 2708 chainlengths.append(chainlengths[delta] + 1)
2709 2709 baseaddr = chainbases[delta]
2710 2710 revaddr = r.start(rev)
2711 2711 chainbases.append(baseaddr)
2712 2712 chainspans.append((revaddr - baseaddr) + size)
2713 2713 if size == 0:
2714 2714 numempty += 1
2715 2715 numemptydelta += 1
2716 2716 elif r.issnapshot(rev):
2717 2717 addsize(size, semisize)
2718 2718 numsemi += 1
2719 2719 depth = r.snapshotdepth(rev)
2720 2720 numsnapdepth[depth] += 1
2721 2721 addsize(size, snapsizedepth[depth])
2722 2722 else:
2723 2723 addsize(size, deltasize)
2724 2724 if delta == rev - 1:
2725 2725 numprev += 1
2726 2726 if delta == p1:
2727 2727 nump1prev += 1
2728 2728 elif delta == p2:
2729 2729 nump2prev += 1
2730 2730 elif delta == p1:
2731 2731 nump1 += 1
2732 2732 elif delta == p2:
2733 2733 nump2 += 1
2734 2734 elif delta != nullrev:
2735 2735 numother += 1
2736 2736
2737 2737 # Obtain data on the raw chunks in the revlog.
2738 2738 if util.safehasattr(r, b'_getsegmentforrevs'):
2739 2739 segment = r._getsegmentforrevs(rev, rev)[1]
2740 2740 else:
2741 2741 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2742 2742 if segment:
2743 2743 chunktype = bytes(segment[0:1])
2744 2744 else:
2745 2745 chunktype = b'empty'
2746 2746
2747 2747 if chunktype not in chunktypecounts:
2748 2748 chunktypecounts[chunktype] = 0
2749 2749 chunktypesizes[chunktype] = 0
2750 2750
2751 2751 chunktypecounts[chunktype] += 1
2752 2752 chunktypesizes[chunktype] += size
2753 2753
2754 2754 # Adjust size min value for empty cases
2755 2755 for size in (datasize, fullsize, semisize, deltasize):
2756 2756 if size[0] is None:
2757 2757 size[0] = 0
2758 2758
2759 2759 numdeltas = numrevs - numfull - numempty - numsemi
2760 2760 numoprev = numprev - nump1prev - nump2prev
2761 2761 totalrawsize = datasize[2]
2762 2762 datasize[2] /= numrevs
2763 2763 fulltotal = fullsize[2]
2764 2764 if numfull == 0:
2765 2765 fullsize[2] = 0
2766 2766 else:
2767 2767 fullsize[2] /= numfull
2768 2768 semitotal = semisize[2]
2769 2769 snaptotal = {}
2770 2770 if numsemi > 0:
2771 2771 semisize[2] /= numsemi
2772 2772 for depth in snapsizedepth:
2773 2773 snaptotal[depth] = snapsizedepth[depth][2]
2774 2774 snapsizedepth[depth][2] /= numsnapdepth[depth]
2775 2775
2776 2776 deltatotal = deltasize[2]
2777 2777 if numdeltas > 0:
2778 2778 deltasize[2] /= numdeltas
2779 2779 totalsize = fulltotal + semitotal + deltatotal
2780 2780 avgchainlen = sum(chainlengths) / numrevs
2781 2781 maxchainlen = max(chainlengths)
2782 2782 maxchainspan = max(chainspans)
2783 2783 compratio = 1
2784 2784 if totalsize:
2785 2785 compratio = totalrawsize / totalsize
2786 2786
2787 2787 basedfmtstr = b'%%%dd\n'
2788 2788 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2789 2789
2790 2790 def dfmtstr(max):
2791 2791 return basedfmtstr % len(str(max))
2792 2792
2793 2793 def pcfmtstr(max, padding=0):
2794 2794 return basepcfmtstr % (len(str(max)), b' ' * padding)
2795 2795
2796 2796 def pcfmt(value, total):
2797 2797 if total:
2798 2798 return (value, 100 * float(value) / total)
2799 2799 else:
2800 2800 return value, 100.0
2801 2801
2802 2802 ui.writenoi18n(b'format : %d\n' % format)
2803 2803 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2804 2804
2805 2805 ui.write(b'\n')
2806 2806 fmt = pcfmtstr(totalsize)
2807 2807 fmt2 = dfmtstr(totalsize)
2808 2808 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2809 2809 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2810 2810 ui.writenoi18n(
2811 2811 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2812 2812 )
2813 2813 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2814 2814 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2815 2815 ui.writenoi18n(
2816 2816 b' text : '
2817 2817 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2818 2818 )
2819 2819 ui.writenoi18n(
2820 2820 b' delta : '
2821 2821 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2822 2822 )
2823 2823 ui.writenoi18n(
2824 2824 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2825 2825 )
2826 2826 for depth in sorted(numsnapdepth):
2827 2827 ui.write(
2828 2828 (b' lvl-%-3d : ' % depth)
2829 2829 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2830 2830 )
2831 2831 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2832 2832 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2833 2833 ui.writenoi18n(
2834 2834 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2835 2835 )
2836 2836 for depth in sorted(numsnapdepth):
2837 2837 ui.write(
2838 2838 (b' lvl-%-3d : ' % depth)
2839 2839 + fmt % pcfmt(snaptotal[depth], totalsize)
2840 2840 )
2841 2841 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2842 2842
2843 2843 def fmtchunktype(chunktype):
2844 2844 if chunktype == b'empty':
2845 2845 return b' %s : ' % chunktype
2846 2846 elif chunktype in pycompat.bytestr(string.ascii_letters):
2847 2847 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2848 2848 else:
2849 2849 return b' 0x%s : ' % hex(chunktype)
2850 2850
2851 2851 ui.write(b'\n')
2852 2852 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2853 2853 for chunktype in sorted(chunktypecounts):
2854 2854 ui.write(fmtchunktype(chunktype))
2855 2855 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2856 2856 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2857 2857 for chunktype in sorted(chunktypecounts):
2858 2858 ui.write(fmtchunktype(chunktype))
2859 2859 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2860 2860
2861 2861 ui.write(b'\n')
2862 2862 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2863 2863 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2864 2864 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2865 2865 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2866 2866 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2867 2867
2868 2868 if format > 0:
2869 2869 ui.write(b'\n')
2870 2870 ui.writenoi18n(
2871 2871 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2872 2872 % tuple(datasize)
2873 2873 )
2874 2874 ui.writenoi18n(
2875 2875 b'full revision size (min/max/avg) : %d / %d / %d\n'
2876 2876 % tuple(fullsize)
2877 2877 )
2878 2878 ui.writenoi18n(
2879 2879 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2880 2880 % tuple(semisize)
2881 2881 )
2882 2882 for depth in sorted(snapsizedepth):
2883 2883 if depth == 0:
2884 2884 continue
2885 2885 ui.writenoi18n(
2886 2886 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2887 2887 % ((depth,) + tuple(snapsizedepth[depth]))
2888 2888 )
2889 2889 ui.writenoi18n(
2890 2890 b'delta size (min/max/avg) : %d / %d / %d\n'
2891 2891 % tuple(deltasize)
2892 2892 )
2893 2893
2894 2894 if numdeltas > 0:
2895 2895 ui.write(b'\n')
2896 2896 fmt = pcfmtstr(numdeltas)
2897 2897 fmt2 = pcfmtstr(numdeltas, 4)
2898 2898 ui.writenoi18n(
2899 2899 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2900 2900 )
2901 2901 if numprev > 0:
2902 2902 ui.writenoi18n(
2903 2903 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2904 2904 )
2905 2905 ui.writenoi18n(
2906 2906 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2907 2907 )
2908 2908 ui.writenoi18n(
2909 2909 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2910 2910 )
2911 2911 if gdelta:
2912 2912 ui.writenoi18n(
2913 2913 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2914 2914 )
2915 2915 ui.writenoi18n(
2916 2916 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2917 2917 )
2918 2918 ui.writenoi18n(
2919 2919 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2920 2920 )
2921 2921
2922 2922
2923 2923 @command(
2924 2924 b'debugrevlogindex',
2925 2925 cmdutil.debugrevlogopts
2926 2926 + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
2927 2927 _(b'[-f FORMAT] -c|-m|FILE'),
2928 2928 optionalrepo=True,
2929 2929 )
2930 2930 def debugrevlogindex(ui, repo, file_=None, **opts):
2931 2931 """dump the contents of a revlog index"""
2932 2932 opts = pycompat.byteskwargs(opts)
2933 2933 r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
2934 2934 format = opts.get(b'format', 0)
2935 2935 if format not in (0, 1):
2936 2936 raise error.Abort(_(b"unknown format %d") % format)
2937 2937
2938 2938 if ui.debugflag:
2939 2939 shortfn = hex
2940 2940 else:
2941 2941 shortfn = short
2942 2942
2943 2943 # There might not be anything in r, so have a sane default
2944 2944 idlen = 12
2945 2945 for i in r:
2946 2946 idlen = len(shortfn(r.node(i)))
2947 2947 break
2948 2948
2949 2949 if format == 0:
2950 2950 if ui.verbose:
2951 2951 ui.writenoi18n(
2952 2952 b" rev offset length linkrev %s %s p2\n"
2953 2953 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2954 2954 )
2955 2955 else:
2956 2956 ui.writenoi18n(
2957 2957 b" rev linkrev %s %s p2\n"
2958 2958 % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
2959 2959 )
2960 2960 elif format == 1:
2961 2961 if ui.verbose:
2962 2962 ui.writenoi18n(
2963 2963 (
2964 2964 b" rev flag offset length size link p1"
2965 2965 b" p2 %s\n"
2966 2966 )
2967 2967 % b"nodeid".rjust(idlen)
2968 2968 )
2969 2969 else:
2970 2970 ui.writenoi18n(
2971 2971 b" rev flag size link p1 p2 %s\n"
2972 2972 % b"nodeid".rjust(idlen)
2973 2973 )
2974 2974
2975 2975 for i in r:
2976 2976 node = r.node(i)
2977 2977 if format == 0:
2978 2978 try:
2979 2979 pp = r.parents(node)
2980 2980 except Exception:
2981 2981 pp = [nullid, nullid]
2982 2982 if ui.verbose:
2983 2983 ui.write(
2984 2984 b"% 6d % 9d % 7d % 7d %s %s %s\n"
2985 2985 % (
2986 2986 i,
2987 2987 r.start(i),
2988 2988 r.length(i),
2989 2989 r.linkrev(i),
2990 2990 shortfn(node),
2991 2991 shortfn(pp[0]),
2992 2992 shortfn(pp[1]),
2993 2993 )
2994 2994 )
2995 2995 else:
2996 2996 ui.write(
2997 2997 b"% 6d % 7d %s %s %s\n"
2998 2998 % (
2999 2999 i,
3000 3000 r.linkrev(i),
3001 3001 shortfn(node),
3002 3002 shortfn(pp[0]),
3003 3003 shortfn(pp[1]),
3004 3004 )
3005 3005 )
3006 3006 elif format == 1:
3007 3007 pr = r.parentrevs(i)
3008 3008 if ui.verbose:
3009 3009 ui.write(
3010 3010 b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
3011 3011 % (
3012 3012 i,
3013 3013 r.flags(i),
3014 3014 r.start(i),
3015 3015 r.length(i),
3016 3016 r.rawsize(i),
3017 3017 r.linkrev(i),
3018 3018 pr[0],
3019 3019 pr[1],
3020 3020 shortfn(node),
3021 3021 )
3022 3022 )
3023 3023 else:
3024 3024 ui.write(
3025 3025 b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
3026 3026 % (
3027 3027 i,
3028 3028 r.flags(i),
3029 3029 r.rawsize(i),
3030 3030 r.linkrev(i),
3031 3031 pr[0],
3032 3032 pr[1],
3033 3033 shortfn(node),
3034 3034 )
3035 3035 )
3036 3036
3037 3037
3038 3038 @command(
3039 3039 b'debugrevspec',
3040 3040 [
3041 3041 (
3042 3042 b'',
3043 3043 b'optimize',
3044 3044 None,
3045 3045 _(b'print parsed tree after optimizing (DEPRECATED)'),
3046 3046 ),
3047 3047 (
3048 3048 b'',
3049 3049 b'show-revs',
3050 3050 True,
3051 3051 _(b'print list of result revisions (default)'),
3052 3052 ),
3053 3053 (
3054 3054 b's',
3055 3055 b'show-set',
3056 3056 None,
3057 3057 _(b'print internal representation of result set'),
3058 3058 ),
3059 3059 (
3060 3060 b'p',
3061 3061 b'show-stage',
3062 3062 [],
3063 3063 _(b'print parsed tree at the given stage'),
3064 3064 _(b'NAME'),
3065 3065 ),
3066 3066 (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
3067 3067 (b'', b'verify-optimized', False, _(b'verify optimized result')),
3068 3068 ],
3069 3069 b'REVSPEC',
3070 3070 )
3071 3071 def debugrevspec(ui, repo, expr, **opts):
3072 3072 """parse and apply a revision specification
3073 3073
3074 3074 Use -p/--show-stage option to print the parsed tree at the given stages.
3075 3075 Use -p all to print tree at every stage.
3076 3076
3077 3077 Use --no-show-revs option with -s or -p to print only the set
3078 3078 representation or the parsed tree respectively.
3079 3079
3080 3080 Use --verify-optimized to compare the optimized result with the unoptimized
3081 3081 one. Returns 1 if the optimized result differs.
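
For instance, a hypothetical run printing every parse stage of a revset
(the tree output is omitted here)::

  $ hg debugrevspec -p all 'ancestors(.) and not merge()'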
3082 3082 """
3083 3083 opts = pycompat.byteskwargs(opts)
3084 3084 aliases = ui.configitems(b'revsetalias')
3085 3085 stages = [
3086 3086 (b'parsed', lambda tree: tree),
3087 3087 (
3088 3088 b'expanded',
3089 3089 lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
3090 3090 ),
3091 3091 (b'concatenated', revsetlang.foldconcat),
3092 3092 (b'analyzed', revsetlang.analyze),
3093 3093 (b'optimized', revsetlang.optimize),
3094 3094 ]
3095 3095 if opts[b'no_optimized']:
3096 3096 stages = stages[:-1]
3097 3097 if opts[b'verify_optimized'] and opts[b'no_optimized']:
3098 3098 raise error.Abort(
3099 3099 _(b'cannot use --verify-optimized with --no-optimized')
3100 3100 )
3101 3101 stagenames = set(n for n, f in stages)
3102 3102
3103 3103 showalways = set()
3104 3104 showchanged = set()
3105 3105 if ui.verbose and not opts[b'show_stage']:
3106 3106 # show parsed tree by --verbose (deprecated)
3107 3107 showalways.add(b'parsed')
3108 3108 showchanged.update([b'expanded', b'concatenated'])
3109 3109 if opts[b'optimize']:
3110 3110 showalways.add(b'optimized')
3111 3111 if opts[b'show_stage'] and opts[b'optimize']:
3112 3112 raise error.Abort(_(b'cannot use --optimize with --show-stage'))
3113 3113 if opts[b'show_stage'] == [b'all']:
3114 3114 showalways.update(stagenames)
3115 3115 else:
3116 3116 for n in opts[b'show_stage']:
3117 3117 if n not in stagenames:
3118 3118 raise error.Abort(_(b'invalid stage name: %s') % n)
3119 3119 showalways.update(opts[b'show_stage'])
3120 3120
3121 3121 treebystage = {}
3122 3122 printedtree = None
3123 3123 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
3124 3124 for n, f in stages:
3125 3125 treebystage[n] = tree = f(tree)
3126 3126 if n in showalways or (n in showchanged and tree != printedtree):
3127 3127 if opts[b'show_stage'] or n != b'parsed':
3128 3128 ui.write(b"* %s:\n" % n)
3129 3129 ui.write(revsetlang.prettyformat(tree), b"\n")
3130 3130 printedtree = tree
3131 3131
3132 3132 if opts[b'verify_optimized']:
3133 3133 arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
3134 3134 brevs = revset.makematcher(treebystage[b'optimized'])(repo)
3135 3135 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3136 3136 ui.writenoi18n(
3137 3137 b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
3138 3138 )
3139 3139 ui.writenoi18n(
3140 3140 b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
3141 3141 )
3142 3142 arevs = list(arevs)
3143 3143 brevs = list(brevs)
3144 3144 if arevs == brevs:
3145 3145 return 0
3146 3146 ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
3147 3147 ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
3148 3148 sm = difflib.SequenceMatcher(None, arevs, brevs)
3149 3149 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3150 3150 if tag in (r'delete', r'replace'):
3151 3151 for c in arevs[alo:ahi]:
3152 3152 ui.write(b'-%d\n' % c, label=b'diff.deleted')
3153 3153 if tag in (r'insert', r'replace'):
3154 3154 for c in brevs[blo:bhi]:
3155 3155 ui.write(b'+%d\n' % c, label=b'diff.inserted')
3156 3156 if tag == r'equal':
3157 3157 for c in arevs[alo:ahi]:
3158 3158 ui.write(b' %d\n' % c)
3159 3159 return 1
3160 3160
3161 3161 func = revset.makematcher(tree)
3162 3162 revs = func(repo)
3163 3163 if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
3164 3164 ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
3165 3165 if not opts[b'show_revs']:
3166 3166 return
3167 3167 for c in revs:
3168 3168 ui.write(b"%d\n" % c)
3169 3169
3170 3170
3171 3171 @command(
3172 3172 b'debugserve',
3173 3173 [
3174 3174 (
3175 3175 b'',
3176 3176 b'sshstdio',
3177 3177 False,
3178 3178 _(b'run an SSH server bound to process handles'),
3179 3179 ),
3180 3180 (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
3181 3181 (b'', b'logiofile', b'', _(b'file to log server I/O to')),
3182 3182 ],
3183 3183 b'',
3184 3184 )
3185 3185 def debugserve(ui, repo, **opts):
3186 3186 """run a server with advanced settings
3187 3187
3188 3188 This command is similar to :hg:`serve`. It exists partially as a
3189 3189 workaround for the fact that ``hg serve --stdio`` must have specific
3190 3190 arguments for security reasons.
3191 3191 """
3192 3192 opts = pycompat.byteskwargs(opts)
3193 3193
3194 3194 if not opts[b'sshstdio']:
3195 3195 raise error.Abort(_(b'only --sshstdio is currently supported'))
3196 3196
3197 3197 logfh = None
3198 3198
3199 3199 if opts[b'logiofd'] and opts[b'logiofile']:
3200 3200 raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))
3201 3201
3202 3202 if opts[b'logiofd']:
3203 3203 # Line buffered because output is line based.
3204 3204 try:
3205 3205 logfh = os.fdopen(int(opts[b'logiofd']), r'ab', 1)
3206 3206 except OSError as e:
3207 3207 if e.errno != errno.ESPIPE:
3208 3208 raise
3209 3209 # can't seek a pipe, so `ab` mode fails on py3
3210 3210 logfh = os.fdopen(int(opts[b'logiofd']), r'wb', 1)
3211 3211 elif opts[b'logiofile']:
3212 3212 logfh = open(opts[b'logiofile'], b'ab', 1)
3213 3213
3214 3214 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
3215 3215 s.serve_forever()
3216 3216
3217 3217
3218 3218 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
3219 3219 def debugsetparents(ui, repo, rev1, rev2=None):
3220 3220 """manually set the parents of the current working directory
3221 3221
3222 3222 This is useful for writing repository conversion tools, but should
3223 3223 be used with care. For example, neither the working directory nor the
3224 3224 dirstate is updated, so file status may be incorrect after running this
3225 3225 command.
3226 3226
3227 3227 Returns 0 on success.
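
For example, a sketch that marks the working directory as a merge of two
existing revisions (the revision numbers are illustrative)::

  $ hg debugsetparents 3 7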
3228 3228 """
3229 3229
3230 3230 node1 = scmutil.revsingle(repo, rev1).node()
3231 3231 node2 = scmutil.revsingle(repo, rev2, b'null').node()
3232 3232
3233 3233 with repo.wlock():
3234 3234 repo.setparents(node1, node2)
3235 3235
3236 3236
3237 3237 @command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
3238 3238 def debugsidedata(ui, repo, file_, rev=None, **opts):
3239 """dump the side data for a cl/manifest/file revision"""
3239 """dump the side data for a cl/manifest/file revision
3240
3241 Use --verbose to dump the sidedata content."""
3240 3242 opts = pycompat.byteskwargs(opts)
3241 3243 if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
3242 3244 if rev is not None:
3243 3245 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3244 3246 file_, rev = None, file_
3245 3247 elif rev is None:
3246 3248 raise error.CommandError(b'debugdata', _(b'invalid arguments'))
3247 3249 r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
3248 3250 r = getattr(r, '_revlog', r)
3249 3251 try:
3250 3252 sidedata = r.sidedata(r.lookup(rev))
3251 3253 except KeyError:
3252 3254 raise error.Abort(_(b'invalid revision identifier %s') % rev)
3253 3255 if sidedata:
3254 3256 sidedata = list(sidedata.items())
3255 3257 sidedata.sort()
3256 3258 ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
3257 3259 for key, value in sidedata:
3258 3260 ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
3259 3261 if ui.verbose:
3260 3262 ui.writenoi18n(b' %s\n' % stringutil.pprint(value))
3261 3263
3262 3264
3263 3265 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
3264 3266 def debugssl(ui, repo, source=None, **opts):
3265 3267 '''test a secure connection to a server
3266 3268
3267 3269 This builds the certificate chain for the server on Windows, installing the
3268 3270 missing intermediates and trusted root via Windows Update if necessary. It
3269 3271 does nothing on other platforms.
3270 3272
3271 3273 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
3272 3274 that server is used. See :hg:`help urls` for more information.
3273 3275
3274 3276 If the update succeeds, retry the original operation. Otherwise, the cause
3275 3277 of the SSL error is likely another issue.
3276 3278 '''
3277 3279 if not pycompat.iswindows:
3278 3280 raise error.Abort(
3279 3281 _(b'certificate chain building is only possible on Windows')
3280 3282 )
3281 3283
3282 3284 if not source:
3283 3285 if not repo:
3284 3286 raise error.Abort(
3285 3287 _(
3286 3288 b"there is no Mercurial repository here, and no "
3287 3289 b"server specified"
3288 3290 )
3289 3291 )
3290 3292 source = b"default"
3291 3293
3292 3294 source, branches = hg.parseurl(ui.expandpath(source))
3293 3295 url = util.url(source)
3294 3296
3295 3297 defaultport = {b'https': 443, b'ssh': 22}
3296 3298 if url.scheme in defaultport:
3297 3299 try:
3298 3300 addr = (url.host, int(url.port or defaultport[url.scheme]))
3299 3301 except ValueError:
3300 3302 raise error.Abort(_(b"malformed port number in URL"))
3301 3303 else:
3302 3304 raise error.Abort(_(b"only https and ssh connections are supported"))
3303 3305
3304 3306 from . import win32
3305 3307
3306 3308 s = ssl.wrap_socket(
3307 3309 socket.socket(),
3308 3310 ssl_version=ssl.PROTOCOL_TLS,
3309 3311 cert_reqs=ssl.CERT_NONE,
3310 3312 ca_certs=None,
3311 3313 )
3312 3314
3313 3315 try:
3314 3316 s.connect(addr)
3315 3317 cert = s.getpeercert(True)
3316 3318
3317 3319 ui.status(_(b'checking the certificate chain for %s\n') % url.host)
3318 3320
3319 3321 complete = win32.checkcertificatechain(cert, build=False)
3320 3322
3321 3323 if not complete:
3322 3324 ui.status(_(b'certificate chain is incomplete, updating... '))
3323 3325
3324 3326 if not win32.checkcertificatechain(cert):
3325 3327 ui.status(_(b'failed.\n'))
3326 3328 else:
3327 3329 ui.status(_(b'done.\n'))
3328 3330 else:
3329 3331 ui.status(_(b'full certificate chain is available\n'))
3330 3332 finally:
3331 3333 s.close()
3332 3334
3333 3335
3334 3336 @command(
3335 3337 b'debugsub',
3336 3338 [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
3337 3339 _(b'[-r REV] [REV]'),
3338 3340 )
3339 3341 def debugsub(ui, repo, rev=None):
3340 3342 ctx = scmutil.revsingle(repo, rev, None)
3341 3343 for k, v in sorted(ctx.substate.items()):
3342 3344 ui.writenoi18n(b'path %s\n' % k)
3343 3345 ui.writenoi18n(b' source %s\n' % v[0])
3344 3346 ui.writenoi18n(b' revision %s\n' % v[1])
3345 3347
3346 3348
3347 3349 @command(
3348 3350 b'debugsuccessorssets',
3349 3351 [(b'', b'closest', False, _(b'return closest successors sets only'))],
3350 3352 _(b'[REV]'),
3351 3353 )
3352 3354 def debugsuccessorssets(ui, repo, *revs, **opts):
3353 3355 """show set of successors for revision
3354 3356
3355 3357 A successors set of changeset A is a consistent group of revisions that
3356 3358 succeed A. It contains non-obsolete changesets only, unless the
3357 3359 --closest option is set.
3358 3360
3359 3361 In most cases a changeset A has a single successors set containing a single
3360 3362 successor (changeset A replaced by A').
3361 3363
3362 3364 A changeset that is made obsolete with no successors is called "pruned".
3363 3365 Such changesets have no successors sets at all.
3364 3366
3365 3367 A changeset that has been "split" will have a successors set containing
3366 3368 more than one successor.
3367 3369
3368 3370 A changeset that has been rewritten in multiple different ways is called
3369 3371 "divergent". Such changesets have multiple successor sets (each of which
3370 3372 may also be split, i.e. have multiple successors).
3371 3373
3372 3374 Results are displayed as follows::
3373 3375
3374 3376 <rev1>
3375 3377 <successors-1A>
3376 3378 <rev2>
3377 3379 <successors-2A>
3378 3380 <successors-2B1> <successors-2B2> <successors-2B3>
3379 3381
3380 3382 Here rev2 has two possible (i.e. divergent) successors sets. The first
3381 3383 holds one element, whereas the second holds three (i.e. the changeset has
3382 3384 been split).
3383 3385 """
3384 3386 # passed to successorssets caching computation from one call to another
3385 3387 cache = {}
3386 3388 ctx2str = bytes
3387 3389 node2str = short
3388 3390 for rev in scmutil.revrange(repo, revs):
3389 3391 ctx = repo[rev]
3390 3392 ui.write(b'%s\n' % ctx2str(ctx))
3391 3393 for succsset in obsutil.successorssets(
3392 3394 repo, ctx.node(), closest=opts[r'closest'], cache=cache
3393 3395 ):
3394 3396 if succsset:
3395 3397 ui.write(b' ')
3396 3398 ui.write(node2str(succsset[0]))
3397 3399 for node in succsset[1:]:
3398 3400 ui.write(b' ')
3399 3401 ui.write(node2str(node))
3400 3402 ui.write(b'\n')
3401 3403
3402 3404
3403 3405 @command(
3404 3406 b'debugtemplate',
3405 3407 [
3406 3408 (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
3407 3409 (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
3408 3410 ],
3409 3411 _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
3410 3412 optionalrepo=True,
3411 3413 )
3412 3414 def debugtemplate(ui, repo, tmpl, **opts):
3413 3415 """parse and apply a template
3414 3416
3415 3417 If -r/--rev is given, the template is processed as a log template and
3416 3418 applied to the given changesets. Otherwise, it is processed as a generic
3417 3419 template.
3418 3420
3419 3421 Use --verbose to print the parsed tree.
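
    For example (an illustrative invocation), one can define a keyword and
    render it for the working directory parent::

      hg debugtemplate -r . -D greeting=hello '{greeting}: {node|short}\n'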
3420 3422 """
3421 3423 revs = None
3422 3424 if opts[r'rev']:
3423 3425 if repo is None:
3424 3426 raise error.RepoError(
3425 3427 _(b'there is no Mercurial repository here (.hg not found)')
3426 3428 )
3427 3429 revs = scmutil.revrange(repo, opts[r'rev'])
3428 3430
3429 3431 props = {}
3430 3432 for d in opts[r'define']:
3431 3433 try:
3432 3434 k, v = (e.strip() for e in d.split(b'=', 1))
3433 3435 if not k or k == b'ui':
3434 3436 raise ValueError
3435 3437 props[k] = v
3436 3438 except ValueError:
3437 3439 raise error.Abort(_(b'malformed keyword definition: %s') % d)
3438 3440
3439 3441 if ui.verbose:
3440 3442 aliases = ui.configitems(b'templatealias')
3441 3443 tree = templater.parse(tmpl)
3442 3444 ui.note(templater.prettyformat(tree), b'\n')
3443 3445 newtree = templater.expandaliases(tree, aliases)
3444 3446 if newtree != tree:
3445 3447 ui.notenoi18n(
3446 3448 b"* expanded:\n", templater.prettyformat(newtree), b'\n'
3447 3449 )
3448 3450
3449 3451 if revs is None:
3450 3452 tres = formatter.templateresources(ui, repo)
3451 3453 t = formatter.maketemplater(ui, tmpl, resources=tres)
3452 3454 if ui.verbose:
3453 3455 kwds, funcs = t.symbolsuseddefault()
3454 3456 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3455 3457 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3456 3458 ui.write(t.renderdefault(props))
3457 3459 else:
3458 3460 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
3459 3461 if ui.verbose:
3460 3462 kwds, funcs = displayer.t.symbolsuseddefault()
3461 3463 ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
3462 3464 ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
3463 3465 for r in revs:
3464 3466 displayer.show(repo[r], **pycompat.strkwargs(props))
3465 3467 displayer.close()
3466 3468
3467 3469
3468 3470 @command(
3469 3471 b'debuguigetpass',
3470 3472 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3471 3473 _(b'[-p TEXT]'),
3472 3474 norepo=True,
3473 3475 )
3474 3476 def debuguigetpass(ui, prompt=b''):
3475 3477 """show prompt to type password"""
3476 3478 r = ui.getpass(prompt)
3477 3479     ui.writenoi18n(b'response: %s\n' % r)
3478 3480
3479 3481
3480 3482 @command(
3481 3483 b'debuguiprompt',
3482 3484 [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
3483 3485 _(b'[-p TEXT]'),
3484 3486 norepo=True,
3485 3487 )
3486 3488 def debuguiprompt(ui, prompt=b''):
3487 3489 """show plain prompt"""
3488 3490 r = ui.prompt(prompt)
3489 3491 ui.writenoi18n(b'response: %s\n' % r)
3490 3492
3491 3493
3492 3494 @command(b'debugupdatecaches', [])
3493 3495 def debugupdatecaches(ui, repo, *pats, **opts):
3494 3496 """warm all known caches in the repository"""
3495 3497 with repo.wlock(), repo.lock():
3496 3498 repo.updatecaches(full=True)
3497 3499
3498 3500
3499 3501 @command(
3500 3502 b'debugupgraderepo',
3501 3503 [
3502 3504 (
3503 3505 b'o',
3504 3506 b'optimize',
3505 3507 [],
3506 3508 _(b'extra optimization to perform'),
3507 3509 _(b'NAME'),
3508 3510 ),
3509 3511 (b'', b'run', False, _(b'performs an upgrade')),
3510 3512 (b'', b'backup', True, _(b'keep the old repository content around')),
3511 3513 (b'', b'changelog', None, _(b'select the changelog for upgrade')),
3512 3514 (b'', b'manifest', None, _(b'select the manifest for upgrade')),
3513 3515 ],
3514 3516 )
3515 3517 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
3516 3518 """upgrade a repository to use different features
3517 3519
3518 3520 If no arguments are specified, the repository is evaluated for upgrade
3519 3521 and a list of problems and potential optimizations is printed.
3520 3522
3521 3523 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
3522 3524 can be influenced via additional arguments. More details will be provided
3523 3525 by the command output when run without ``--run``.
3524 3526
3525 3527 During the upgrade, the repository will be locked and no writes will be
3526 3528 allowed.
3527 3529
3528 3530 At the end of the upgrade, the repository may not be readable while new
3529 3531 repository data is swapped in. This window will be as long as it takes to
3530 3532 rename some directories inside the ``.hg`` directory. On most machines, this
3531 3533 should complete almost instantaneously and the chances of a consumer being
3532 3534 unable to access the repository should be low.
3533 3535
3534 3536     By default, all revlogs will be upgraded. You can restrict this using
3535 3537     flags such as `--manifest` (an example invocation follows the list):
3536 3538
3537 3539 * `--manifest`: only optimize the manifest
3538 3540     * `--no-manifest`: optimize all revlogs but the manifest
3539 3541 * `--changelog`: optimize the changelog only
3540 3542 * `--no-changelog --no-manifest`: optimize filelogs only
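
    For example, an illustrative invocation that restricts the upgrade to the
    manifest revlog would be::

      hg debugupgraderepo --run --manifest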
3541 3543 """
3542 3544 return upgrade.upgraderepo(
3543 3545 ui, repo, run=run, optimize=optimize, backup=backup, **opts
3544 3546 )
3545 3547
3546 3548
3547 3549 @command(
3548 3550 b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
3549 3551 )
3550 3552 def debugwalk(ui, repo, *pats, **opts):
3551 3553 """show how files match on given patterns"""
3552 3554 opts = pycompat.byteskwargs(opts)
3553 3555 m = scmutil.match(repo[None], pats, opts)
3554 3556 if ui.verbose:
3555 3557 ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
3556 3558 items = list(repo[None].walk(m))
3557 3559 if not items:
3558 3560 return
3559 3561 f = lambda fn: fn
3560 3562 if ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/':
3561 3563 f = lambda fn: util.normpath(fn)
3562 3564 fmt = b'f %%-%ds %%-%ds %%s' % (
3563 3565 max([len(abs) for abs in items]),
3564 3566 max([len(repo.pathto(abs)) for abs in items]),
3565 3567 )
3566 3568 for abs in items:
3567 3569 line = fmt % (
3568 3570 abs,
3569 3571 f(repo.pathto(abs)),
3570 3572 m.exact(abs) and b'exact' or b'',
3571 3573 )
3572 3574 ui.write(b"%s\n" % line.rstrip())
3573 3575
3574 3576
3575 3577 @command(b'debugwhyunstable', [], _(b'REV'))
3576 3578 def debugwhyunstable(ui, repo, rev):
3577 3579 """explain instabilities of a changeset"""
3578 3580 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
3579 3581 dnodes = b''
3580 3582 if entry.get(b'divergentnodes'):
3581 3583 dnodes = (
3582 3584 b' '.join(
3583 3585 b'%s (%s)' % (ctx.hex(), ctx.phasestr())
3584 3586 for ctx in entry[b'divergentnodes']
3585 3587 )
3586 3588 + b' '
3587 3589 )
3588 3590 ui.write(
3589 3591 b'%s: %s%s %s\n'
3590 3592 % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
3591 3593 )
3592 3594
3593 3595
3594 3596 @command(
3595 3597 b'debugwireargs',
3596 3598 [
3597 3599 (b'', b'three', b'', b'three'),
3598 3600 (b'', b'four', b'', b'four'),
3599 3601 (b'', b'five', b'', b'five'),
3600 3602 ]
3601 3603 + cmdutil.remoteopts,
3602 3604 _(b'REPO [OPTIONS]... [ONE [TWO]]'),
3603 3605 norepo=True,
3604 3606 )
3605 3607 def debugwireargs(ui, repopath, *vals, **opts):
3606 3608 opts = pycompat.byteskwargs(opts)
3607 3609 repo = hg.peer(ui, opts, repopath)
3608 3610 for opt in cmdutil.remoteopts:
3609 3611 del opts[opt[1]]
3610 3612 args = {}
3611 3613 for k, v in pycompat.iteritems(opts):
3612 3614 if v:
3613 3615 args[k] = v
3614 3616 args = pycompat.strkwargs(args)
3615 3617 # run twice to check that we don't mess up the stream for the next command
3616 3618 res1 = repo.debugwireargs(*vals, **args)
3617 3619 res2 = repo.debugwireargs(*vals, **args)
3618 3620 ui.write(b"%s\n" % res1)
3619 3621 if res1 != res2:
3620 3622 ui.warn(b"%s\n" % res2)
3621 3623
3622 3624
3623 3625 def _parsewirelangblocks(fh):
3624 3626 activeaction = None
3625 3627 blocklines = []
3626 3628 lastindent = 0
3627 3629
3628 3630 for line in fh:
3629 3631 line = line.rstrip()
3630 3632 if not line:
3631 3633 continue
3632 3634
3633 3635 if line.startswith(b'#'):
3634 3636 continue
3635 3637
3636 3638 if not line.startswith(b' '):
3637 3639 # New block. Flush previous one.
3638 3640 if activeaction:
3639 3641 yield activeaction, blocklines
3640 3642
3641 3643 activeaction = line
3642 3644 blocklines = []
3643 3645 lastindent = 0
3644 3646 continue
3645 3647
3646 3648 # Else we start with an indent.
3647 3649
3648 3650 if not activeaction:
3649 3651 raise error.Abort(_(b'indented line outside of block'))
3650 3652
3651 3653 indent = len(line) - len(line.lstrip())
3652 3654
3653 3655 # If this line is indented more than the last line, concatenate it.
3654 3656 if indent > lastindent and blocklines:
3655 3657 blocklines[-1] += line.lstrip()
3656 3658 else:
3657 3659 blocklines.append(line)
3658 3660 lastindent = indent
3659 3661
3660 3662 # Flush last block.
3661 3663 if activeaction:
3662 3664 yield activeaction, blocklines
3663 3665
3664 3666
3665 3667 @command(
3666 3668 b'debugwireproto',
3667 3669 [
3668 3670 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3669 3671 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3670 3672 (
3671 3673 b'',
3672 3674 b'noreadstderr',
3673 3675 False,
3674 3676 _(b'do not read from stderr of the remote'),
3675 3677 ),
3676 3678 (
3677 3679 b'',
3678 3680 b'nologhandshake',
3679 3681 False,
3680 3682 _(b'do not log I/O related to the peer handshake'),
3681 3683 ),
3682 3684 ]
3683 3685 + cmdutil.remoteopts,
3684 3686 _(b'[PATH]'),
3685 3687 optionalrepo=True,
3686 3688 )
3687 3689 def debugwireproto(ui, repo, path=None, **opts):
3688 3690 """send wire protocol commands to a server
3689 3691
3690 3692 This command can be used to issue wire protocol commands to remote
3691 3693 peers and to debug the raw data being exchanged.
3692 3694
3693 3695 ``--localssh`` will start an SSH server against the current repository
3694 3696 and connect to that. By default, the connection will perform a handshake
3695 3697 and establish an appropriate peer instance.
3696 3698
3697 3699 ``--peer`` can be used to bypass the handshake protocol and construct a
3698 3700 peer instance using the specified class type. Valid values are ``raw``,
3699 3701 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3700 3702 raw data payloads and don't support higher-level command actions.
3701 3703
3702 3704 ``--noreadstderr`` can be used to disable automatic reading from stderr
3703 3705 of the peer (for SSH connections only). Disabling automatic reading of
3704 3706 stderr is useful for making output more deterministic.
3705 3707
3706 3708 Commands are issued via a mini language which is specified via stdin.
3707 3709 The language consists of individual actions to perform. An action is
3708 3710 defined by a block. A block is defined as a line with no leading
3709 3711 space followed by 0 or more lines with leading space. Blocks are
3710 3712 effectively a high-level command with additional metadata.
3711 3713
3712 3714 Lines beginning with ``#`` are ignored.
3713 3715
3714 3716 The following sections denote available actions.
3715 3717
3716 3718 raw
3717 3719 ---
3718 3720
3719 3721 Send raw data to the server.
3720 3722
3721 3723 The block payload contains the raw data to send as one atomic send
3722 3724 operation. The data may not actually be delivered in a single system
3723 3725 call: it depends on the abilities of the transport being used.
3724 3726
3725 3727 Each line in the block is de-indented and concatenated. Then, that
3726 3728 value is evaluated as a Python b'' literal. This allows the use of
3727 3729 backslash escaping, etc.
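
    For example, the SSH protocol handshake command could be sent as raw data
    (an illustrative block, assuming an SSH peer)::

      raw
          hello\n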
3728 3730
3729 3731 raw+
3730 3732 ----
3731 3733
3732 3734 Behaves like ``raw`` except flushes output afterwards.
3733 3735
3734 3736 command <X>
3735 3737 -----------
3736 3738
3737 3739 Send a request to run a named command, whose name follows the ``command``
3738 3740 string.
3739 3741
3740 3742 Arguments to the command are defined as lines in this block. The format of
3741 3743 each line is ``<key> <value>``. e.g.::
3742 3744
3743 3745 command listkeys
3744 3746 namespace bookmarks
3745 3747
3746 3748 If the value begins with ``eval:``, it will be interpreted as a Python
3747 3749 literal expression. Otherwise values are interpreted as Python b'' literals.
3748 3750 This allows sending complex types and encoding special byte sequences via
3749 3751 backslash escaping.
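
    For instance, an empty list can be sent as the ``nodes`` argument of the
    ``known`` command by using a Python literal::

      command known
          nodes eval:[]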
3750 3752
3751 3753 The following arguments have special meaning:
3752 3754
3753 3755 ``PUSHFILE``
3754 3756 When defined, the *push* mechanism of the peer will be used instead
3755 3757 of the static request-response mechanism and the content of the
3756 3758 file specified in the value of this argument will be sent as the
3757 3759 command payload.
3758 3760
3759 3761 This can be used to submit a local bundle file to the remote.
3760 3762
3761 3763 batchbegin
3762 3764 ----------
3763 3765
3764 3766 Instruct the peer to begin a batched send.
3765 3767
3766 3768 All ``command`` blocks are queued for execution until the next
3767 3769 ``batchsubmit`` block.
3768 3770
3769 3771 batchsubmit
3770 3772 -----------
3771 3773
3772 3774 Submit previously queued ``command`` blocks as a batch request.
3773 3775
3774 3776 This action MUST be paired with a ``batchbegin`` action.
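
    For example, two commands could be batched as follows (illustrative)::

      batchbegin
      command heads
      command listkeys
          namespace bookmarks
      batchsubmit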
3775 3777
3776 3778 httprequest <method> <path>
3777 3779 ---------------------------
3778 3780
3779 3781 (HTTP peer only)
3780 3782
3781 3783 Send an HTTP request to the peer.
3782 3784
3783 3785 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3784 3786
3785 3787 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3786 3788 headers to add to the request. e.g. ``Accept: foo``.
3787 3789
3788 3790 The following arguments are special:
3789 3791
3790 3792 ``BODYFILE``
3791 3793 The content of the file defined as the value to this argument will be
3792 3794 transferred verbatim as the HTTP request body.
3793 3795
3794 3796 ``frame <type> <flags> <payload>``
3795 3797 Send a unified protocol frame as part of the request body.
3796 3798
3797 3799 All frames will be collected and sent as the body to the HTTP
3798 3800 request.
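
    For example, a request with a custom header might be written as
    (illustrative)::

      httprequest GET api/
          user-agent: test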
3799 3801
3800 3802 close
3801 3803 -----
3802 3804
3803 3805 Close the connection to the server.
3804 3806
3805 3807 flush
3806 3808 -----
3807 3809
3808 3810 Flush data written to the server.
3809 3811
3810 3812 readavailable
3811 3813 -------------
3812 3814
3813 3815 Close the write end of the connection and read all available data from
3814 3816 the server.
3815 3817
3816 3818 If the connection to the server encompasses multiple pipes, we poll both
3817 3819 pipes and read available data.
3818 3820
3819 3821 readline
3820 3822 --------
3821 3823
3822 3824 Read a line of output from the server. If there are multiple output
3823 3825 pipes, reads only the main pipe.
3824 3826
3825 3827 ereadline
3826 3828 ---------
3827 3829
3828 3830 Like ``readline``, but read from the stderr pipe, if available.
3829 3831
3830 3832 read <X>
3831 3833 --------
3832 3834
3833 3835 ``read()`` N bytes from the server's main output pipe.
3834 3836
3835 3837 eread <X>
3836 3838 ---------
3837 3839
3838 3840 ``read()`` N bytes from the server's stderr pipe, if available.
3839 3841
3840 3842 Specifying Unified Frame-Based Protocol Frames
3841 3843 ----------------------------------------------
3842 3844
3843 3845     It is possible to emit *Unified Frame-Based Protocol* frames by using
3844 3846     special syntax.
3845 3847
3846 3848 A frame is composed as a type, flags, and payload. These can be parsed
3847 3849 from a string of the form:
3848 3850
3849 3851 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3850 3852
3851 3853 ``request-id`` and ``stream-id`` are integers defining the request and
3852 3854 stream identifiers.
3853 3855
3854 3856 ``type`` can be an integer value for the frame type or the string name
3855 3857 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3856 3858 ``command-name``.
3857 3859
3858 3860 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3859 3861 components. Each component (and there can be just one) can be an integer
3860 3862 or a flag name for stream flags or frame flags, respectively. Values are
3861 3863 resolved to integers and then bitwise OR'd together.
3862 3864
3863 3865 ``payload`` represents the raw frame payload. If it begins with
3864 3866 ``cbor:``, the following string is evaluated as Python code and the
3865 3867 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3866 3868 as a Python byte string literal.
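
    For example, a frame requesting the ``heads`` command could be spelled
    (illustratively) as::

      1 1 stream-begin command-request new cbor:{b'name': b'heads'}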
3867 3869 """
3868 3870 opts = pycompat.byteskwargs(opts)
3869 3871
3870 3872 if opts[b'localssh'] and not repo:
3871 3873 raise error.Abort(_(b'--localssh requires a repository'))
3872 3874
3873 3875 if opts[b'peer'] and opts[b'peer'] not in (
3874 3876 b'raw',
3875 3877 b'http2',
3876 3878 b'ssh1',
3877 3879 b'ssh2',
3878 3880 ):
3879 3881 raise error.Abort(
3880 3882 _(b'invalid value for --peer'),
3881 3883             hint=_(b'valid values are "raw", "http2", "ssh1", and "ssh2"'),
3882 3884 )
3883 3885
3884 3886 if path and opts[b'localssh']:
3885 3887 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
3886 3888
3887 3889 if ui.interactive():
3888 3890 ui.write(_(b'(waiting for commands on stdin)\n'))
3889 3891
3890 3892 blocks = list(_parsewirelangblocks(ui.fin))
3891 3893
3892 3894 proc = None
3893 3895 stdin = None
3894 3896 stdout = None
3895 3897 stderr = None
3896 3898 opener = None
3897 3899
3898 3900 if opts[b'localssh']:
3899 3901 # We start the SSH server in its own process so there is process
3900 3902 # separation. This prevents a whole class of potential bugs around
3901 3903 # shared state from interfering with server operation.
3902 3904 args = procutil.hgcmd() + [
3903 3905 b'-R',
3904 3906 repo.root,
3905 3907 b'debugserve',
3906 3908 b'--sshstdio',
3907 3909 ]
3908 3910 proc = subprocess.Popen(
3909 3911 pycompat.rapply(procutil.tonativestr, args),
3910 3912 stdin=subprocess.PIPE,
3911 3913 stdout=subprocess.PIPE,
3912 3914 stderr=subprocess.PIPE,
3913 3915 bufsize=0,
3914 3916 )
3915 3917
3916 3918 stdin = proc.stdin
3917 3919 stdout = proc.stdout
3918 3920 stderr = proc.stderr
3919 3921
3920 3922 # We turn the pipes into observers so we can log I/O.
3921 3923 if ui.verbose or opts[b'peer'] == b'raw':
3922 3924 stdin = util.makeloggingfileobject(
3923 3925 ui, proc.stdin, b'i', logdata=True
3924 3926 )
3925 3927 stdout = util.makeloggingfileobject(
3926 3928 ui, proc.stdout, b'o', logdata=True
3927 3929 )
3928 3930 stderr = util.makeloggingfileobject(
3929 3931 ui, proc.stderr, b'e', logdata=True
3930 3932 )
3931 3933
3932 3934 # --localssh also implies the peer connection settings.
3933 3935
3934 3936 url = b'ssh://localserver'
3935 3937 autoreadstderr = not opts[b'noreadstderr']
3936 3938
3937 3939 if opts[b'peer'] == b'ssh1':
3938 3940 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
3939 3941 peer = sshpeer.sshv1peer(
3940 3942 ui,
3941 3943 url,
3942 3944 proc,
3943 3945 stdin,
3944 3946 stdout,
3945 3947 stderr,
3946 3948 None,
3947 3949 autoreadstderr=autoreadstderr,
3948 3950 )
3949 3951 elif opts[b'peer'] == b'ssh2':
3950 3952 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
3951 3953 peer = sshpeer.sshv2peer(
3952 3954 ui,
3953 3955 url,
3954 3956 proc,
3955 3957 stdin,
3956 3958 stdout,
3957 3959 stderr,
3958 3960 None,
3959 3961 autoreadstderr=autoreadstderr,
3960 3962 )
3961 3963 elif opts[b'peer'] == b'raw':
3962 3964 ui.write(_(b'using raw connection to peer\n'))
3963 3965 peer = None
3964 3966 else:
3965 3967 ui.write(_(b'creating ssh peer from handshake results\n'))
3966 3968 peer = sshpeer.makepeer(
3967 3969 ui,
3968 3970 url,
3969 3971 proc,
3970 3972 stdin,
3971 3973 stdout,
3972 3974 stderr,
3973 3975 autoreadstderr=autoreadstderr,
3974 3976 )
3975 3977
3976 3978 elif path:
3977 3979 # We bypass hg.peer() so we can proxy the sockets.
3978 3980 # TODO consider not doing this because we skip
3979 3981 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3980 3982 u = util.url(path)
3981 3983 if u.scheme != b'http':
3982 3984 raise error.Abort(_(b'only http:// paths are currently supported'))
3983 3985
3984 3986 url, authinfo = u.authinfo()
3985 3987 openerargs = {
3986 3988 r'useragent': b'Mercurial debugwireproto',
3987 3989 }
3988 3990
3989 3991 # Turn pipes/sockets into observers so we can log I/O.
3990 3992 if ui.verbose:
3991 3993 openerargs.update(
3992 3994 {
3993 3995 r'loggingfh': ui,
3994 3996 r'loggingname': b's',
3995 3997 r'loggingopts': {r'logdata': True, r'logdataapis': False,},
3996 3998 }
3997 3999 )
3998 4000
3999 4001 if ui.debugflag:
4000 4002 openerargs[r'loggingopts'][r'logdataapis'] = True
4001 4003
4002 4004 # Don't send default headers when in raw mode. This allows us to
4003 4005 # bypass most of the behavior of our URL handling code so we can
4004 4006 # have near complete control over what's sent on the wire.
4005 4007 if opts[b'peer'] == b'raw':
4006 4008 openerargs[r'sendaccept'] = False
4007 4009
4008 4010 opener = urlmod.opener(ui, authinfo, **openerargs)
4009 4011
4010 4012 if opts[b'peer'] == b'http2':
4011 4013 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4012 4014 # We go through makepeer() because we need an API descriptor for
4013 4015 # the peer instance to be useful.
4014 4016 with ui.configoverride(
4015 4017 {(b'experimental', b'httppeer.advertise-v2'): True}
4016 4018 ):
4017 4019 if opts[b'nologhandshake']:
4018 4020 ui.pushbuffer()
4019 4021
4020 4022 peer = httppeer.makepeer(ui, path, opener=opener)
4021 4023
4022 4024 if opts[b'nologhandshake']:
4023 4025 ui.popbuffer()
4024 4026
4025 4027 if not isinstance(peer, httppeer.httpv2peer):
4026 4028 raise error.Abort(
4027 4029 _(
4028 4030 b'could not instantiate HTTP peer for '
4029 4031 b'wire protocol version 2'
4030 4032 ),
4031 4033 hint=_(
4032 4034 b'the server may not have the feature '
4033 4035 b'enabled or is not allowing this '
4034 4036 b'client version'
4035 4037 ),
4036 4038 )
4037 4039
4038 4040 elif opts[b'peer'] == b'raw':
4039 4041 ui.write(_(b'using raw connection to peer\n'))
4040 4042 peer = None
4041 4043 elif opts[b'peer']:
4042 4044 raise error.Abort(
4043 4045 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4044 4046 )
4045 4047 else:
4046 4048 peer = httppeer.makepeer(ui, path, opener=opener)
4047 4049
4048 4050 # We /could/ populate stdin/stdout with sock.makefile()...
4049 4051 else:
4050 4052 raise error.Abort(_(b'unsupported connection configuration'))
4051 4053
4052 4054 batchedcommands = None
4053 4055
4054 4056 # Now perform actions based on the parsed wire language instructions.
4055 4057 for action, lines in blocks:
4056 4058 if action in (b'raw', b'raw+'):
4057 4059 if not stdin:
4058 4060 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4059 4061
4060 4062 # Concatenate the data together.
4061 4063 data = b''.join(l.lstrip() for l in lines)
4062 4064 data = stringutil.unescapestr(data)
4063 4065 stdin.write(data)
4064 4066
4065 4067 if action == b'raw+':
4066 4068 stdin.flush()
4067 4069 elif action == b'flush':
4068 4070 if not stdin:
4069 4071 raise error.Abort(_(b'cannot call flush on this peer'))
4070 4072 stdin.flush()
4071 4073 elif action.startswith(b'command'):
4072 4074 if not peer:
4073 4075 raise error.Abort(
4074 4076 _(
4075 4077 b'cannot send commands unless peer instance '
4076 4078 b'is available'
4077 4079 )
4078 4080 )
4079 4081
4080 4082 command = action.split(b' ', 1)[1]
4081 4083
4082 4084 args = {}
4083 4085 for line in lines:
4084 4086 # We need to allow empty values.
4085 4087 fields = line.lstrip().split(b' ', 1)
4086 4088 if len(fields) == 1:
4087 4089 key = fields[0]
4088 4090 value = b''
4089 4091 else:
4090 4092 key, value = fields
4091 4093
4092 4094 if value.startswith(b'eval:'):
4093 4095 value = stringutil.evalpythonliteral(value[5:])
4094 4096 else:
4095 4097 value = stringutil.unescapestr(value)
4096 4098
4097 4099 args[key] = value
4098 4100
4099 4101 if batchedcommands is not None:
4100 4102 batchedcommands.append((command, args))
4101 4103 continue
4102 4104
4103 4105 ui.status(_(b'sending %s command\n') % command)
4104 4106
4105 4107 if b'PUSHFILE' in args:
4106 4108 with open(args[b'PUSHFILE'], r'rb') as fh:
4107 4109 del args[b'PUSHFILE']
4108 4110 res, output = peer._callpush(
4109 4111 command, fh, **pycompat.strkwargs(args)
4110 4112 )
4111 4113 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4112 4114 ui.status(
4113 4115 _(b'remote output: %s\n') % stringutil.escapestr(output)
4114 4116 )
4115 4117 else:
4116 4118 with peer.commandexecutor() as e:
4117 4119 res = e.callcommand(command, args).result()
4118 4120
4119 4121 if isinstance(res, wireprotov2peer.commandresponse):
4120 4122 val = res.objects()
4121 4123 ui.status(
4122 4124 _(b'response: %s\n')
4123 4125 % stringutil.pprint(val, bprefix=True, indent=2)
4124 4126 )
4125 4127 else:
4126 4128 ui.status(
4127 4129 _(b'response: %s\n')
4128 4130 % stringutil.pprint(res, bprefix=True, indent=2)
4129 4131 )
4130 4132
4131 4133 elif action == b'batchbegin':
4132 4134 if batchedcommands is not None:
4133 4135 raise error.Abort(_(b'nested batchbegin not allowed'))
4134 4136
4135 4137 batchedcommands = []
4136 4138 elif action == b'batchsubmit':
4137 4139 # There is a batching API we could go through. But it would be
4138 4140 # difficult to normalize requests into function calls. It is easier
4139 4141 # to bypass this layer and normalize to commands + args.
4140 4142 ui.status(
4141 4143 _(b'sending batch with %d sub-commands\n')
4142 4144 % len(batchedcommands)
4143 4145 )
4144 4146 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4145 4147 ui.status(
4146 4148 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4147 4149 )
4148 4150
4149 4151 batchedcommands = None
4150 4152
4151 4153 elif action.startswith(b'httprequest '):
4152 4154 if not opener:
4153 4155 raise error.Abort(
4154 4156 _(b'cannot use httprequest without an HTTP peer')
4155 4157 )
4156 4158
4157 4159 request = action.split(b' ', 2)
4158 4160 if len(request) != 3:
4159 4161 raise error.Abort(
4160 4162 _(
4161 4163 b'invalid httprequest: expected format is '
4162 4164                         b'"httprequest <method> <path>"'
4163 4165 )
4164 4166 )
4165 4167
4166 4168 method, httppath = request[1:]
4167 4169 headers = {}
4168 4170 body = None
4169 4171 frames = []
4170 4172 for line in lines:
4171 4173 line = line.lstrip()
4172 4174 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4173 4175 if m:
4174 4176 # Headers need to use native strings.
4175 4177 key = pycompat.strurl(m.group(1))
4176 4178 value = pycompat.strurl(m.group(2))
4177 4179 headers[key] = value
4178 4180 continue
4179 4181
4180 4182 if line.startswith(b'BODYFILE '):
4181 4183                     with open(line.split(b' ', 1)[1], b'rb') as fh:
4182 4184 body = fh.read()
4183 4185 elif line.startswith(b'frame '):
4184 4186 frame = wireprotoframing.makeframefromhumanstring(
4185 4187 line[len(b'frame ') :]
4186 4188 )
4187 4189
4188 4190 frames.append(frame)
4189 4191 else:
4190 4192 raise error.Abort(
4191 4193 _(b'unknown argument to httprequest: %s') % line
4192 4194 )
4193 4195
4194 4196 url = path + httppath
4195 4197
4196 4198 if frames:
4197 4199 body = b''.join(bytes(f) for f in frames)
4198 4200
4199 4201 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4200 4202
4201 4203 # urllib.Request insists on using has_data() as a proxy for
4202 4204 # determining the request method. Override that to use our
4203 4205 # explicitly requested method.
4204 4206 req.get_method = lambda: pycompat.sysstr(method)
4205 4207
4206 4208 try:
4207 4209 res = opener.open(req)
4208 4210 body = res.read()
4209 4211 except util.urlerr.urlerror as e:
4210 4212 # read() method must be called, but only exists in Python 2
4211 4213 getattr(e, 'read', lambda: None)()
4212 4214 continue
4213 4215
4214 4216 ct = res.headers.get(r'Content-Type')
4215 4217 if ct == r'application/mercurial-cbor':
4216 4218 ui.write(
4217 4219 _(b'cbor> %s\n')
4218 4220 % stringutil.pprint(
4219 4221 cborutil.decodeall(body), bprefix=True, indent=2
4220 4222 )
4221 4223 )
4222 4224
4223 4225 elif action == b'close':
4224 4226 peer.close()
4225 4227 elif action == b'readavailable':
4226 4228 if not stdout or not stderr:
4227 4229 raise error.Abort(
4228 4230 _(b'readavailable not available on this peer')
4229 4231 )
4230 4232
4231 4233 stdin.close()
4232 4234 stdout.read()
4233 4235 stderr.read()
4234 4236
4235 4237 elif action == b'readline':
4236 4238 if not stdout:
4237 4239 raise error.Abort(_(b'readline not available on this peer'))
4238 4240 stdout.readline()
4239 4241 elif action == b'ereadline':
4240 4242 if not stderr:
4241 4243 raise error.Abort(_(b'ereadline not available on this peer'))
4242 4244 stderr.readline()
4243 4245 elif action.startswith(b'read '):
4244 4246 count = int(action.split(b' ', 1)[1])
4245 4247 if not stdout:
4246 4248 raise error.Abort(_(b'read not available on this peer'))
4247 4249 stdout.read(count)
4248 4250 elif action.startswith(b'eread '):
4249 4251 count = int(action.split(b' ', 1)[1])
4250 4252 if not stderr:
4251 4253 raise error.Abort(_(b'eread not available on this peer'))
4252 4254 stderr.read(count)
4253 4255 else:
4254 4256 raise error.Abort(_(b'unknown action: %s') % action)
4255 4257
4256 4258 if batchedcommands is not None:
4257 4259 raise error.Abort(_(b'unclosed "batchbegin" request'))
4258 4260
4259 4261 if peer:
4260 4262 peer.close()
4261 4263
4262 4264 if proc:
4263 4265 proc.kill()