##// END OF EJS Templates
debugcommands: move away from line buffered output on binary stream...
Gregory Szorc -
r44582:52f8b07a default
parent child Browse files
Show More
@@ -1,4285 +1,4288 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import codecs
11 11 import collections
12 12 import difflib
13 13 import errno
14 14 import operator
15 15 import os
16 16 import random
17 17 import re
18 18 import socket
19 19 import ssl
20 20 import stat
21 21 import string
22 22 import subprocess
23 23 import sys
24 24 import time
25 25
26 26 from .i18n import _
27 27 from .node import (
28 28 bin,
29 29 hex,
30 30 nullhex,
31 31 nullid,
32 32 nullrev,
33 33 short,
34 34 )
35 35 from .pycompat import (
36 36 getattr,
37 37 open,
38 38 )
39 39 from . import (
40 40 bundle2,
41 41 changegroup,
42 42 cmdutil,
43 43 color,
44 44 context,
45 45 copies,
46 46 dagparser,
47 47 encoding,
48 48 error,
49 49 exchange,
50 50 extensions,
51 51 filemerge,
52 52 filesetlang,
53 53 formatter,
54 54 hg,
55 55 httppeer,
56 56 localrepo,
57 57 lock as lockmod,
58 58 logcmdutil,
59 59 merge as mergemod,
60 60 obsolete,
61 61 obsutil,
62 62 pathutil,
63 63 phases,
64 64 policy,
65 65 pvec,
66 66 pycompat,
67 67 registrar,
68 68 repair,
69 69 revlog,
70 70 revset,
71 71 revsetlang,
72 72 scmutil,
73 73 setdiscovery,
74 74 simplemerge,
75 75 sshpeer,
76 76 sslutil,
77 77 streamclone,
78 78 templater,
79 79 treediscovery,
80 80 upgrade,
81 81 url as urlmod,
82 82 util,
83 83 vfs as vfsmod,
84 84 wireprotoframing,
85 85 wireprotoserver,
86 86 wireprotov2peer,
87 87 )
88 88 from .utils import (
89 89 cborutil,
90 90 compression,
91 91 dateutil,
92 92 procutil,
93 93 stringutil,
94 94 )
95 95
96 96 from .revlogutils import deltas as deltautil
97 97
# Convenience alias: releases a sequence of lock objects.
release = lockmod.release

# Decorator used below to register each debug* command into the command table.
command = registrar.command()
101 101
102 102
@command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # explicit index file given: open it directly, no repo required
        index, rev1, rev2 = args
        opener = vfsmod.vfs(encoding.getcwd(), audit=False)
        r = revlog.revlog(opener, index)
        lookup = r.lookup
    elif nargs == 2:
        # no index file: fall back to the changelog of the local repository
        if not repo:
            raise error.Abort(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_(b'either two or three arguments required'))
    ancestor = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write(b'%d:%s\n' % (r.rev(ancestor), hex(ancestor)))
122 122
123 123
@command(b'debugapplystreamclonebundle', [], b'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # open the bundle (handles local paths and URLs), parse, then apply
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
130 130
131 131
@command(
    b'debugbuilddag',
    [
        (
            b'm',
            b'mergeable-file',
            None,
            _(b'add single file mergeable changes'),
        ),
        (
            b'o',
            b'overwritten-file',
            None,
            _(b'add single file all revs overwrite'),
        ),
        (b'n', b'new-file', None, _(b'add new file at each rev')),
    ],
    _(b'[OPTION]... [TEXT]'),
)
def debugbuilddag(
    ui,
    repo,
    text=None,
    mergeable_file=False,
    overwritten_file=False,
    new_file=False,
):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_(b"reading DAG from stdin\n"))
        text = ui.fin.read()

    # this command only makes sense on a pristine repository
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_(b'repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == b'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [
            b'%d' % i for i in pycompat.xrange(0, total * linesperrev)
        ]
        initialmergedlines.append(b"")

    tags = []
    progress = ui.makeprogress(
        _(b'building'), unit=_(b'revisions'), total=total
    )
    # second parse pass: actually create a commit per 'n' event
    with progress, repo.wlock(), repo.lock(), repo.transaction(b"builddag"):
        at = -1  # rev id of the most recently committed node
        atbranch = b'default'  # branch applied to subsequent commits
        nodeids = []  # maps DAG rev id -> commit node, for parent lookups
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == b'n':
                ui.note((b'node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = b"mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: 3-way merge the shared file's contents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [
                            x[fn].data() for x in (pa, p1, p2)
                        ]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append(b"")
                    elif at > 0:
                        ml = p1[fn].data().split(b"\n")
                    else:
                        # very first commit starts from the template lines
                        ml = initialmergedlines
                    # touch this rev's dedicated line so every rev changes it
                    ml[id * linesperrev] += b" r%i" % id
                    mergedtext = b"\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = b"of"
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id

                if new_file:
                    fn = b"nf%i" % id
                    files.append(fn)
                    filecontent[fn] = b"r%i\n" % id
                if len(ps) > 1:
                    # merges carry over the second parent's nf* files
                    if not p2:
                        p2 = repo[ps[1]]
                    for fn in p2:
                        if fn.startswith(b"nf"):
                            files.append(fn)
                            filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve content recorded above, or
                    # None for paths this commit does not touch
                    if path in filecontent:
                        return context.memfilectx(
                            repo, cx, path, filecontent[path]
                        )
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(
                    repo,
                    pars,
                    b"r%i" % id,
                    files,
                    fctxfn,
                    date=(id, 0),
                    user=b"debugbuilddag",
                    extra={b'branch': atbranch},
                )
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == b'l':
                # local tag event: record for writing to .hg/localtags below
                id, name = data
                ui.note((b'tag %s\n' % name))
                tags.append(b"%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == b'a':
                ui.note((b'branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write(b"localtags", b"".join(tags))
307 307
308 308
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup unbundler ``gen`` to the ui.

    With ``all`` set, every delta of every section (changelog, manifest,
    filelogs) is listed; otherwise only the changelog node hashes are
    printed.  ``indent`` prefixes each line (used when nested inside
    bundle2 part output).
    """
    indent_string = b' ' * indent
    if all:
        ui.writenoi18n(
            b"%sformat: id, p1, p2, cset, delta base, len(delta)\n"
            % indent_string
        )

        def showchunks(named):
            # print one delta-per-line summary for the named section
            ui.write(b"\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write(
                    b"%s%s %s %s %s %s %d\n"
                    % (
                        indent_string,
                        hex(node),
                        hex(p1),
                        hex(p2),
                        hex(cs),
                        hex(deltabase),
                        len(delta),
                    )
                )

        # sections must be consumed in changegroup order:
        # changelog, manifest, then one section per filelog
        gen.changelogheader()
        showchunks(b"changelog")
        gen.manifestheader()
        showchunks(b"manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata[b'filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_(b'use debugbundle2 for this file'))
        gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write(b"%s%s\n" % (indent_string, hex(node)))
348 348
349 349
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = b' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # marker format newer than this client understands: report it
        # instead of aborting, so the rest of the bundle can still be shown
        msg = b"%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = b"%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter(b'debugobsolete', opts)
        # sorted() gives deterministic output across runs
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
372 372
373 373
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    prefix = b' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write(b'%s %s\n' % (hex(head), phasename))
382 382
383 383
def _quasirepr(thing):
    """render ``thing`` as bytes; dict-likes get a sorted, stable form."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return b'{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
390 390
391 391
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_(b'not a bundle2 file'))
    ui.write((b'Stream params: %s\n' % _quasirepr(gen.params)))
    # optional --part-type filter: only show the listed part types
    parttypes = opts.get('part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = b'%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known part payloads get a detailed, indented dump unless --quiet
        if part.type == b'changegroup':
            version = part.params.get(b'version', b'01')
            cg = changegroup.getunbundler(version, part, b'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == b'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == b'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
414 414
415 415
@command(
    b'debugbundle',
    [
        (b'a', b'all', None, _(b'show all details')),
        (b'', b'part-type', [], _(b'show only the named part type')),
        (b'', b'spec', None, _(b'print the bundlespec of the bundle')),
    ],
    _(b'FILE'),
    norepo=True,
)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec only reports the bundlespec, never the contents
            spec = exchange.getbundlespec(ui, f)
            ui.write(b'%s\n' % spec)
            return

        unbundler = exchange.readbundle(ui, f, bundlepath)
        # bundle2 files carry parts and need the dedicated dumper
        if isinstance(unbundler, bundle2.unbundle20):
            return _debugbundle2(ui, unbundler, all=all, **opts)
        _debugchangegroup(ui, unbundler, all=all, **opts)
438 438
439 439
@command(b'debugcapabilities', [], _(b'PATH'), norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    ui.writenoi18n(b'Main capabilities:\n')
    for cap in sorted(peer.capabilities()):
        ui.write(b' %s\n' % cap)
    # bundle2 advertises a second, nested capability namespace
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.writenoi18n(b'Bundle2 capabilities:\n')
        for key, values in sorted(pycompat.iteritems(b2caps)):
            ui.write(b' %s\n' % key)
            for value in values:
                ui.write(b' %s\n' % value)
456 456
457 457
@command(b'debugcheckstate', [], b'')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    # cross-check every dirstate entry against the manifests of both
    # working-directory parents; abort if any inconsistency is found
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        # state codes: n=normal, a=added, r=removed, m=merged
        state = repo.dirstate[f]
        if state in b"nr" and f not in m1:
            ui.warn(_(b"%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in b"a" and f in m1:
            ui.warn(_(b"%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in b"m" and f not in m1 and f not in m2:
            ui.warn(
                _(b"%s in state %s, but not in either manifest\n") % (f, state)
            )
            errors += 1
    # reverse direction: every file in manifest1 must be tracked
    for f in m1:
        state = repo.dirstate[f]
        if state not in b"nrm":
            ui.warn(_(b"%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        errstr = _(b".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
486 486
487 487
@command(
    b'debugcolor',
    [(b'', b'style', None, _(b'show all configured styles'))],
    b'hg debugcolor',
)
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.writenoi18n(b'color mode: %s\n' % stringutil.pprint(ui._colormode))
    # --style lists configured style labels; default lists raw colors
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
500 500
501 501
def _debugdisplaycolor(ui):
    """list every color/effect label known to the active color mode."""
    ui = ui.copy()  # work on a copy so the caller's styles are untouched
    ui._styles.clear()
    # map each active effect to itself so the listing renders styled
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose user-configured color.*/terminfo.* keys
        for k, v in ui.configitems(b'color'):
            if k.startswith(b'color.'):
                ui._styles[k] = k[6:]
            elif k.startswith(b'terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_(b'available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(), key=lambda i: (b'_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(b'%s\n' % colorname, label=label)
518 518
519 519
def _debugdisplaystyle(ui):
    """list each configured style label with its effects, column-aligned."""
    ui.write(_(b'available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect lists line up in one column
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write(b'%s' % label, label=label)
        if effects:
            ui.write(b': ')
            padding = max(0, width - len(label))
            ui.write(b' ' * padding)
            ui.write(b', '.join(ui.label(e, e) for e in effects.split()))
        ui.write(b'\n')
533 533
534 534
@command(b'debugcreatestreamclonebundle', [], b'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        warning = _(
            b'(warning: stream clone bundle will contain secret '
            b'revisions)\n'
        )
        ui.warn(warning)

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqstr = b', '.join(sorted(requirements))
    ui.write(_(b'bundle requirements: %s\n') % reqstr)
556 556
557 557
@command(
    b'debugdag',
    [
        (b't', b'tags', None, _(b'use tags as labels')),
        (b'b', b'branches', None, _(b'annotate with branch names')),
        (b'', b'dots', None, _(b'use dots for runs')),
        (b's', b'spaces', None, _(b'separate elements by spaces')),
    ],
    _(b'[OPTION]... [FILE [REV]...]'),
    optionalrepo=True,
)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit index file: dump that revlog's DAG, labeling the
        # requested rev numbers as "rN"
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))

        def events():
            # yield ('n', (rev, [parents])) nodes, plus 'l' label events
            # for the revs selected on the command line
            for r in rlog:
                yield b'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                if r in revs:
                    yield b'l', (r, b"r%i" % r)

    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # pre-compute rev -> [tag names] for 'l' label events below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = b"default"  # currently-announced branch name
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event on branch changes
                    newb = cl.read(cl.node(r))[5][b'branch']
                    if newb != b:
                        yield b'a', newb
                        b = newb
                yield b'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield b'l', (r, l)

    else:
        raise error.Abort(_(b'need repo for changelog dag'))

    for line in dagparser.dagtextlines(
        events(),
        addspaces=spaces,
        wraplabels=True,
        wrapannotations=True,
        wrapnonlinear=dots,
        usedots=dots,
        maxlinewidth=70,
    ):
        ui.write(line)
    ui.write(b"\n")
627 627
628 628
@command(b'debugdata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # with -c/-m/--dir there is no FILE argument: the first positional
    # argument is actually the revision
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    store = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    try:
        ui.write(store.rawdata(store.lookup(rev)))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
644 644
645 645
@command(
    b'debugdate',
    [(b'e', b'extended', None, _(b'try extended date formats'))],
    _(b'[-e] DATE [RANGE]'),
    norepo=True,
    optionalrepo=True,
)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended additionally tries the extended date format list
    if opts["extended"]:
        parsed = dateutil.parsedate(date, dateutil.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.writenoi18n(b"internal: %d %d\n" % parsed)
    ui.writenoi18n(b"standard: %s\n" % dateutil.datestr(parsed))
    if range:
        matchfn = dateutil.matchdate(range)
        ui.writenoi18n(b"match: %s\n" % matchfn(parsed[0]))
664 664
665 665
@command(
    b'debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        """classify rev's delta and sum its chain's compressed size.

        Returns (compsize, uncompsize, deltatype, chain, chainsize).
        """
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; compare against parents (e[5]/e[6])
            # and neighbors to name what the delta was computed against
            if e[3] == e[5]:
                deltatype = b'p1'
            elif e[3] == e[6]:
                deltatype = b'p2'
            elif e[3] == rev - 1:
                deltatype = b'prev'
            elif e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'other'
        else:
            # without generaldelta, deltas are always against the
            # previous revision unless the rev is its own base
            if e[3] == rev:
                deltatype = b'base'
            else:
                deltatype = b'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter(b'debugdeltachain', opts)

    fm.plain(
        b' rev chain# chainlen prev delta '
        b'size rawsize chainsize ratio lindist extradist '
        b'extraratio'
    )
    if withsparseread:
        fm.plain(b' readsize largestblk rddensity srchunks')
    fm.plain(b'\n')

    # chain ids are assigned in order of first appearance of each base
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes spanned on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1  # chain of length 1: no previous rev

        # guard the ratios against division by zero
        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write(
            b'rev chainid chainlen prevrev deltatype compsize '
            b'uncompsize chainsize chainratio lindist extradist '
            b'extraratio',
            b'%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
            rev,
            chainid,
            len(chain),
            prevrev,
            deltatype,
            comp,
            uncomp,
            chainsize,
            chainratio,
            lineardist,
            extradist,
            extraratio,
            rev=rev,
            chainid=chainid,
            chainlen=len(chain),
            prevrev=prevrev,
            deltatype=deltatype,
            compsize=comp,
            uncompsize=uncomp,
            chainsize=chainsize,
            chainratio=chainratio,
            lindist=lineardist,
            extradist=extradist,
            extraratio=extraratio,
        )
        if withsparseread:
            # simulate a sparse read of the chain and measure how many
            # bytes/blocks would actually be touched
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write(
                b'readsize largestblock readdensity srchunks',
                b' %10d %10d %9.5f %8d',
                readsize,
                largestblock,
                readdensity,
                srchunks,
                readsize=readsize,
                largestblock=largestblock,
                readdensity=readdensity,
                srchunks=srchunks,
            )

        fm.plain(b'\n')

    fm.end()
846 846
847 847
@command(
    b'debugdirstate|debugstate',
    [
        (
            b'',
            b'nodates',
            None,
            _(b'do not display the saved mtime (DEPRECATED)'),
        ),
        (b'', b'dates', True, _(b'display the saved mtime')),
        (b'', b'datesort', None, _(b'sort by saved mtime')),
    ],
    _(b'[OPTION]...'),
)
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling of --no-dates; either disables
    # mtime display
    nodates = not opts['dates']
    if opts.get('nodates') is not None:
        nodates = True
    datesort = opts.get('datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
    else:
        keyfunc = None  # sort by filename
    # dirstate entry tuple: ent[0]=state char, ent[1]=mode, ent[2]=size,
    # ent[3]=mtime (-1 when unset)
    for file_, ent in sorted(pycompat.iteritems(repo.dirstate), key=keyfunc):
        if ent[3] == -1:
            timestr = b'unset '
        elif nodates:
            timestr = b'set '
        else:
            timestr = time.strftime(
                "%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
            )
            timestr = encoding.strtolocal(timestr)
        # symlink bit (0o20000) gets a textual marker instead of a mode
        if ent[1] & 0o20000:
            mode = b'lnk'
        else:
            mode = b'%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write(b"%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_(b"copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
891 891
892 892
@command(
    b'debugdiscovery',
    [
        (b'', b'old', None, _(b'use old-style discovery')),
        (
            b'',
            b'nonheads',
            None,
            _(b'use old-style discovery with non-heads included'),
        ),
        (b'', b'rev', [], b'restrict discovery to this set of revs'),
        (b'', b'seed', b'12323', b'specify the random seed use for discovery'),
    ]
    + cmdutil.remoteopts,
    _(b'[--rev REV] [OTHER]'),
)
def debugdiscovery(ui, repo, remoteurl=b"default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_(b'comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts[b'seed']))

    # pick one of two discovery implementations; both expose the same
    # doit(pushedrevs, remoteheads) -> (common nodes, remote heads) shape
    if opts.get(b'old'):

        def doit(pushedrevs, remoteheads, remote=remote):
            # legacy tree-walking discovery
            if not util.safehasattr(remote, b'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(
                repo, remote, force=True
            )
            common = set(common)
            if not opts.get(b'nonheads'):
                ui.writenoi18n(
                    b"unpruned common: %s\n"
                    % b" ".join(sorted(short(n) for n in common))
                )

            # reduce the common set to its heads, as new discovery reports
            clnode = repo.changelog.node
            common = repo.revs(b'heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds

    else:

        def doit(pushedrevs, remoteheads, remote=remote):
            # modern sampling-based set discovery, optionally restricted
            # to the ancestors of --rev
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes
            )
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts[b'rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data[b'elapsed'] = t.elapsed
    data[b'nb-common'] = len(common)
    data[b'nb-common-local'] = len(common & lheads)
    data[b'nb-common-remote'] = len(common & rheads)
    data[b'nb-common-both'] = len(common & rheads & lheads)
    data[b'nb-local'] = len(lheads)
    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
    data[b'nb-remote'] = len(rheads)
    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
    data[b'nb-revs'] = len(repo.revs(b'all()'))
    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']

    # display discovery summary
    ui.writenoi18n(b"elapsed time: %(elapsed)f seconds\n" % data)
    ui.writenoi18n(b"heads summary:\n")
    ui.writenoi18n(b" total common heads: %(nb-common)9d\n" % data)
    ui.writenoi18n(b" also local heads: %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b" also remote heads: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b" both: %(nb-common-both)9d\n" % data)
    ui.writenoi18n(b" local heads: %(nb-local)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-common-local)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-local-missing)9d\n" % data)
    ui.writenoi18n(b" remote heads: %(nb-remote)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-common-remote)9d\n" % data)
    ui.writenoi18n(b" unknown: %(nb-remote-unknown)9d\n" % data)
    ui.writenoi18n(b"local changesets: %(nb-revs)9d\n" % data)
    ui.writenoi18n(b" common: %(nb-revs-common)9d\n" % data)
    ui.writenoi18n(b" missing: %(nb-revs-missing)9d\n" % data)

    if ui.verbose:
        ui.writenoi18n(
            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
        )
998 998
_chunksize = 4 << 10  # 4 KiB: read/write granularity used by debugdownload
1000 1000
1001 1001
@command(
    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is streamed in ``_chunksize`` pieces either to the ui
    (default) or to the file named by --output.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, b"wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # always close the source handle; the previous implementation
        # leaked it on every invocation
        fh.close()
        if output:
            dest.close()
1021 1021
1022 1022
@command(b'debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter(b'debugextensions', opts)
    # one formatter item per extension, sorted by extension name
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = None

        # locate the extension on disk; oxidized builds have no __file__,
        # so fall back to the executable path there
        if util.safehasattr(extmod, '__file__'):
            extsource = pycompat.fsencode(extmod.__file__)
        elif getattr(sys, 'oxidized', False):
            extsource = pycompat.sysexecutable
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', b'').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        # default (non-quiet, non-verbose) output appends a testing-status
        # note on the same line as the name
        if ui.quiet or ui.verbose:
            fm.write(b'name', b'%s\n', extname)
        else:
            fm.write(b'name', b'%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain(b'\n')
            elif not exttestedwith:
                fm.plain(_(b' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(b' (%s!)\n' % lasttestedversion)

        fm.condwrite(
            ui.verbose and extsource,
            b'source',
            _(b' location: %s\n'),
            extsource or b"",
        )

        if ui.verbose:
            fm.plain(_(b' bundled: %s\n') % [b'no', b'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(
            ui.verbose and exttestedwith,
            b'testedwith',
            _(b' tested with: %s\n'),
            fm.formatlist(exttestedwith, name=b'ver'),
        )

        fm.condwrite(
            ui.verbose and extbuglink,
            b'buglink',
            _(b' bug reporting: %s\n'),
            extbuglink or b"",
        )

    fm.end()
1084 1084
1085 1085
@command(
    b'debugfileset',
    [
        (
            b'r',
            b'rev',
            b'',
            _(b'apply the filespec on this revision'),
            _(b'REV'),
        ),
        (
            b'',
            b'all-files',
            False,
            _(b'test files from all revisions and working directory'),
        ),
        (
            b's',
            b'show-matcher',
            None,
            _(b'print internal representation of matcher'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
    ],
    _(b'[-r REV] [--all-files] [OPTION]... FILESPEC'),
)
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset

    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)

    # the fileset expression goes through these successive transformations;
    # --show-stage can dump the tree after any of them
    stages = [
        (b'parsed', pycompat.identity),
        (b'analyzed', filesetlang.analyze),
        (b'optimized', filesetlang.optimize),
    ]
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(filesetlang.prettyformat(tree), b"\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts[b'all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts[b'all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(
            repo.dirstate.walk(
                scmutil.matchall(repo),
                subrepos=list(wctx.substate),
                unknown=True,
                ignored=True,
            )
        )
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(repo.getcwd(), expr)
    if opts[b'show_matcher'] or (opts[b'show_matcher'] is None and ui.verbose):
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    # print every candidate file the fileset matches
    for f in sorted(files):
        if not m(f):
            continue
        ui.write(b"%s\n" % f)
1182 1182
@command(b'debugformat', [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # first column is as wide as the longest variant name, but never
    # narrower than the header label
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len(b'format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad the name so the value columns line up
        return b'%s:' + (b' ' * (maxvariantlength - len(name)))

    fm = ui.formatter(b'debugformat', opts)
    if fm.isplain():

        def formatvalue(value):
            # plain output: bytes values pass through, booleans become yes/no
            if util.safehasattr(value, b'startswith'):
                return value
            if value:
                return b'yes'
            else:
                return b'no'

    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain(b'format-variant')
    fm.plain(b' ' * (maxvariantlength - len(b'format-variant')))
    fm.plain(b' repo')
    if ui.verbose:
        fm.plain(b' config default')
    fm.plain(b'\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # highlight disagreements between repo state, config and default
        if repovalue != configvalue:
            namelabel = b'formatvariant.name.mismatchconfig'
            repolabel = b'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = b'formatvariant.name.mismatchdefault'
            repolabel = b'formatvariant.repo.mismatchdefault'
        else:
            namelabel = b'formatvariant.name.uptodate'
            repolabel = b'formatvariant.repo.uptodate'

        fm.write(b'name', makeformatname(fv.name), fv.name, label=namelabel)
        fm.write(b'repo', b' %3s', formatvalue(repovalue), label=repolabel)
        if fv.default != configvalue:
            configlabel = b'formatvariant.config.special'
        else:
            configlabel = b'formatvariant.config.default'
        fm.condwrite(
            ui.verbose,
            b'config',
            b' %6s',
            formatvalue(configvalue),
            label=configlabel,
        )
        fm.condwrite(
            ui.verbose,
            b'default',
            b' %7s',
            formatvalue(fv.default),
            label=b'formatvariant.default',
        )
        fm.plain(b'\n')
    fm.end()
1253 1253
1254 1254
@command(b'debugfsinfo', [], _(b'[PATH]'), norepo=True)
def debugfsinfo(ui, path=b"."):
    """show information detected about current filesystem"""

    def yesno(flag):
        # render a boolean probe result the way this command displays it
        return b'yes' if flag else b'no'

    ui.writenoi18n(b'path: %s\n' % path)
    ui.writenoi18n(
        b'mounted on: %s\n' % (util.getfsmountpoint(path) or b'(unknown)')
    )
    ui.writenoi18n(b'exec: %s\n' % yesno(util.checkexec(path)))
    ui.writenoi18n(b'fstype: %s\n' % (util.getfstype(path) or b'(unknown)'))
    ui.writenoi18n(b'symlink: %s\n' % yesno(util.checklink(path)))
    ui.writenoi18n(b'hardlink: %s\n' % yesno(util.checknlink(path)))
    # probe case sensitivity with a throwaway file; if that fails (e.g. the
    # path is not writable) report "(unknown)" rather than aborting
    casesensitive = b'(unknown)'
    try:
        with pycompat.namedtempfile(prefix=b'.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.writenoi18n(b'case-sensitive: %s\n' % casesensitive)
1277 1277
1278 1278
@command(
    b'debuggetbundle',
    [
        (b'H', b'head', [], _(b'id of head node'), _(b'ID')),
        (b'C', b'common', [], _(b'id of common node'), _(b'ID')),
        (
            b't',
            b'type',
            b'bzip2',
            _(b'bundle compression type to use'),
            _(b'TYPE'),
        ),
    ],
    _(b'REPO FILE [-H|-C ID]...'),
    norepo=True,
)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable(b'getbundle'):
        raise error.Abort(b"getbundle() not supported by target repository")

    # build the wire-protocol arguments from the command line
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle(b'debug', **args)

    # map user-facing compression names onto on-disk bundle types
    btypes = {
        b'none': b'HG10UN',
        b'bzip2': b'HG10BZ',
        b'gzip': b'HG10GZ',
        b'bundle2': b'HG20',
    }
    requested = opts.get(b'type', b'bzip2').lower()
    bundletype = btypes.get(requested)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_(b'unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1325 1325
1326 1326
@command(b'debugignore', [], b'[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write(b"%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != b'.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # the file itself is not ignored; check whether one of
                    # its containing directories is
                    for p in pathutil.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_(b"%s is ignored\n") % uipathfn(f))
                else:
                    ui.write(
                        _(
                            b"%s is ignored because of "
                            b"containing directory %s\n"
                        )
                        % (uipathfn(f), ignored)
                    )
                # report exactly which rule (file, line, pattern) matched
                ignorefile, lineno, line = ignoredata
                ui.write(
                    _(b"(ignore rule in %s, line %d: '%s')\n")
                    % (ignorefile, lineno, line)
                )
            else:
                ui.write(_(b"%s is not ignored\n") % uipathfn(f))
1375 1375
1376 1376
@command(
    b'debugindex',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _(b'-c|-m|FILE'),
)
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindex', file_, opts)

    # --debug shows full-length hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # probe the first revision (if any) to size the node id columns
    idlen = 12
    for i in store:
        idlen = len(shortfn(store.node(i)))
        break

    fm = ui.formatter(b'debugindex', opts)
    fm.plain(
        b' rev linkrev %s %s p2\n'
        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
    )

    # one row per revision: rev, linkrev, node, and both parents
    for rev in store:
        node = store.node(rev)
        parents = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', b'%7d ', store.linkrev(rev))
        fm.write(b'node', b'%s ', shortfn(node))
        fm.write(b'p1', b'%s ', shortfn(parents[0]))
        fm.write(b'p2', b'%s', shortfn(parents[1]))
        fm.plain(b'\n')

    fm.end()
1416 1416
1417 1417
@command(
    b'debugindexdot',
    cmdutil.debugrevlogopts,
    _(b'-c|-m|FILE'),
    optionalrepo=True,
)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, b'debugindexdot', file_, opts)
    ui.writenoi18n(b"digraph G {\n")
    for rev in store:
        p1, p2 = store.parents(store.node(rev))
        # every revision gets an edge from its first parent; a second
        # edge is emitted only for merges (non-null second parent)
        ui.write(b"\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write(b"\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write(b"}\n")
1436 1436
1437 1437
@command(b'debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise the index before inspecting it — presumably forces the
    # native index object to be fully built; TODO confirm
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    # only the native (C) index implementation exposes stats()
    if not util.safehasattr(index, b'stats'):
        raise error.Abort(_(b'debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write(b'%s: %d\n' % (key, stats[key]))
1447 1447
1448 1448
@command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # number of detected problems; also the return value / exit status
    problems = 0

    fm = ui.formatter(b'debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write(b'encoding', _(b"checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(
        err,
        b'encodingerror',
        _(b" %s\n (check that your locale is properly set)\n"),
        err,
    )

    # Python
    # oxidized (PyOxidizer) builds have no __file__ on modules, so fall
    # back to the executable path there
    pythonlib = None
    if util.safehasattr(os, '__file__'):
        pythonlib = os.path.dirname(pycompat.fsencode(os.__file__))
    elif getattr(sys, 'oxidized', False):
        pythonlib = pycompat.sysexecutable

    fm.write(
        b'pythonexe',
        _(b"checking Python executable (%s)\n"),
        pycompat.sysexecutable or _(b"unknown"),
    )
    fm.write(
        b'pythonver',
        _(b"checking Python version (%s)\n"),
        (b"%d.%d.%d" % sys.version_info[:3]),
    )
    fm.write(
        b'pythonlib',
        _(b"checking Python lib (%s)...\n"),
        pythonlib or _(b"unknown"),
    )

    # TLS/SNI support of the Python build
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add(b'sni')

    fm.write(
        b'pythonsecurity',
        _(b"checking Python security support (%s)\n"),
        fm.formatlist(sorted(security), name=b'protocol', fmt=b'%s', sep=b','),
    )

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if b'tls1.2' not in security:
        fm.plain(
            _(
                b' TLS 1.2 not supported by Python install; '
                b'network connections lack modern security\n'
            )
        )
    if b'sni' not in security:
        fm.plain(
            _(
                b' SNI not supported by Python install; may have '
                b'connectivity issues with some servers\n'
            )
        )

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write(
        b'hgver', _(b"checking Mercurial version (%s)\n"), hgver.split(b'+')[0]
    )
    fm.write(
        b'hgverextra',
        _(b"checking Mercurial custom build (%s)\n"),
        b'+'.join(hgver.split(b'+')[1:]),
    )

    # compiled modules
    hgmodules = None
    if util.safehasattr(sys.modules[__name__], '__file__'):
        hgmodules = os.path.dirname(pycompat.fsencode(__file__))
    elif getattr(sys, 'oxidized', False):
        hgmodules = pycompat.sysexecutable

    fm.write(
        b'hgmodulepolicy', _(b"checking module policy (%s)\n"), policy.policy
    )
    fm.write(
        b'hgmodules',
        _(b"checking installed modules (%s)...\n"),
        hgmodules or _(b"unknown"),
    )

    # if the policy requires native extensions, import them to verify that
    # they actually load
    rustandc = policy.policy in (b'rust+c', b'rust+c-allow')
    rustext = rustandc # for now, that's the only case
    cext = policy.policy in (b'c', b'allow') or rustandc
    nopure = cext or rustext
    if nopure:
        err = None
        try:
            if cext:
                from .cext import ( # pytype: disable=import-error
                    base85,
                    bdiff,
                    mpatch,
                    osutil,
                )

                # quiet pyflakes
                dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
            if rustext:
                from .rustext import ( # pytype: disable=import-error
                    ancestor,
                    dirstate,
                )

                dir(ancestor), dir(dirstate) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, b'extensionserror', b" %s\n", err)

    # compression engines: registered, importable, and wire-protocol capable
    compengines = util.compengines._engines.values()
    fm.write(
        b'compengines',
        _(b'checking registered compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    fm.write(
        b'compenginesavail',
        _(b'checking available compression engines (%s)\n'),
        fm.formatlist(
            sorted(e.name() for e in compengines if e.available()),
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    wirecompengines = compression.compengines.supportedwireengines(
        compression.SERVERROLE
    )
    fm.write(
        b'compenginesserver',
        _(
            b'checking available compression engines '
            b'for wire protocol (%s)\n'
        ),
        fm.formatlist(
            [e.name() for e in wirecompengines if e.wireprotosupport()],
            name=b'compengine',
            fmt=b'%s',
            sep=b', ',
        ),
    )
    re2 = b'missing'
    if util._re2:
        re2 = b'available'
    fm.plain(_(b'checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write(b'templatedirs', b'checking templates (%s)...\n', b' '.join(p))
    fm.condwrite(not p, b'', _(b" no template directories found\n"))
    if p:
        m = templater.templatepath(b"map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, b'defaulttemplateerror', b" %s\n", err)
        else:
            p = None
        fm.condwrite(
            p, b'defaulttemplate', _(b"checking default template (%s)\n"), m
        )
        fm.condwrite(
            not m,
            b'defaulttemplatenotfound',
            _(b" template '%s' not found\n"),
            b"default",
        )
    if not p:
        problems += 1
    fm.condwrite(
        not p, b'', _(b" (templates seem to have been installed incorrectly)\n")
    )

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write(b'editor', _(b"checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # 'vi' is the built-in fallback, so a missing 'vi' means no editor was
    # ever configured; report that case with a different message
    fm.condwrite(
        not cmdpath and editor == b'vi',
        b'vinotfound',
        _(
            b" No commit editor set and can't find %s in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editor == b'vi' and editorbin,
    )
    fm.condwrite(
        not cmdpath and editor != b'vi',
        b'editornotfound',
        _(
            b" Can't find editor '%s' in PATH\n"
            b" (specify a commit editor in your configuration"
            b" file)\n"
        ),
        not cmdpath and editorbin,
    )
    if not cmdpath and editor != b'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(
        username, b'username', _(b"checking username (%s)\n"), username
    )
    fm.condwrite(
        err,
        b'usernameerror',
        _(
            b"checking username...\n %s\n"
            b" (specify a username in your configuration file)\n"
        ),
        err,
    )

    # extensions may contribute their own checks via a 'debuginstall' hook
    for name, mod in extensions.extensions():
        handler = getattr(mod, 'debuginstall', None)
        if handler is not None:
            problems += handler(ui, fm)

    fm.condwrite(not problems, b'', _(b"no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(
        problems,
        b'problems',
        _(b"%d problems detected, please check your install!\n"),
        problems,
    )
    fm.end()

    return problems
1728 1728
1729 1729
@command(b'debugknown', [], _(b'REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable(b'known'):
        raise error.Abort(b"known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # render each answer as "1" (known) or "0" (unknown), in input order
    ui.write(b"%s\n" % b"".join(b"1" if f else b"0" for f in flags))
1743 1743
1744 1744
@command(b'debuglabelcomplete', [], _(b'LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # plain alias: delegates entirely to debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1749 1749
1750 1750
@command(
    b'debuglocks',
    [
        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
        (
            b'W',
            b'force-wlock',
            None,
            _(b'free the working state lock (DANGEROUS)'),
        ),
        (b's', b'set-lock', None, _(b'set the store lock until stopped')),
        (
            b'S',
            b'set-wlock',
            None,
            _(b'set the working state lock until stopped'),
        ),
    ],
    _(b'[OPTION]...'),
)
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # forced removal: just delete the lock files and exit
    if opts.get('force_lock'):
        repo.svfs.unlink(b'lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink(b'wlock')
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    # --set-lock / --set-wlock: acquire non-blockingly, hold until the
    # user answers the prompt, then release in the finally block
    locks = []
    try:
        if opts.get('set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_(b'wlock is already held'))
        if opts.get('set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_(b'lock is already held'))
        if len(locks):
            ui.promptchoice(_(b"ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        # NOTE(review): release() is bound at module scope (not visible in
        # this chunk, presumably lockmod.release); it frees any locks taken
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # report the state of one lock file; returns 1 if held, 0 if free
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it, so nobody else holds it
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                # lock contents are "host:pid"; pretty-print, noting the
                # host only when it is not the local machine
                if b":" in locker:
                    host, pid = locker.split(b':')
                    if host == socket.gethostname():
                        locker = b'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = b'user %s, process %s, host %s' % (
                            user or b'None',
                            pid,
                            host,
                        )
                ui.writenoi18n(b"%-6s %s (%ds)\n" % (name + b":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.writenoi18n(b"%-6s free\n" % (name + b":"))
        return 0

    held += report(repo.svfs, b"lock", repo.lock)
    held += report(repo.vfs, b"wlock", repo.wlock)

    return held
1862 1862
1863 1863
@command(
    b'debugmanifestfulltextcache',
    [
        (b'', b'clear', False, _(b'clear the cache')),
        (
            b'a',
            b'add',
            [],
            _(b'add the given manifest nodes to the cache'),
            _(b'NODE'),
        ),
    ],
    b'',
)
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # only some revlog implementations carry a fulltext cache
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _(
                b"Current revlog implementation doesn't appear to have a "
                b"manifest fulltext cache\n"
            )
            raise error.Abort(msg)

    if opts.get('clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint=b"Check your manifest node id")
                manifest.read() # reading stores the revision in the cache
        return

    # default action: list the cache contents
    cache = getcache()
    if not len(cache):
        ui.write(_(b'cache empty\n'))
    else:
        ui.write(
            _(
                b'cache contains %d manifest entries, in order of most to '
                b'least recent:\n'
            )
            % (len(cache),)
        )
        totalsize = 0
        for nodeid in cache:
            # use peek() so inspection does not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
            ui.write(
                _(b'id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
            )
        ondisk = cache._opener.stat(b'manifestfulltextcache').st_size
        ui.write(
            _(b'total cache data size %s, on-disk %s\n')
            % (util.bytecount(totalsize), util.bytecount(ondisk))
        )
1935 1935
1936 1936
@command(b'debugmergestate', [], b'')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""

    def _hashornull(h):
        # render the null hash as the literal string 'null' for readability
        if h == nullhex:
            return b'null'
        else:
            return h

    def printrecords(version):
        # dump the (already sorted) records of the requested format version;
        # v1records/v2records are closed over from the enclosing function
        ui.writenoi18n(b'* version %d records\n' % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == b'L':
                ui.writenoi18n(b'local: %s\n' % record)
            elif rtype == b'O':
                ui.writenoi18n(b'other: %s\n' % record)
            elif rtype == b'm':
                driver, mdstate = record.split(b'\0', 1)
                ui.writenoi18n(
                    b'merge driver: %s (state "%s")\n' % (driver, mdstate)
                )
            elif rtype in b'FDC':
                # per-file records share a NUL-separated layout; v1 lacks
                # the "other node" field that v2 records carry
                r = record.split(b'\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = b'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.writenoi18n(
                    b'file: %s (record type "%s", state "%s", hash %s)\n'
                    % (f, rtype, state, _hashornull(hash))
                )
                ui.writenoi18n(
                    b'  local path: %s (flags "%s")\n' % (lfile, flags)
                )
                ui.writenoi18n(
                    b'  ancestor path: %s (node %s)\n'
                    % (afile, _hashornull(anode))
                )
                ui.writenoi18n(
                    b'  other path: %s (node %s)\n'
                    % (ofile, _hashornull(onode))
                )
            elif rtype == b'f':
                # file extras: filename followed by alternating key/value
                # pairs, all NUL-separated
                filename, rawextras = record.split(b'\0', 1)
                extras = rawextras.split(b'\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append(b'%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.writenoi18n(
                    b'file extras: %s (%s)\n'
                    % (filename, b', '.join(extrastrings))
                )
            elif rtype == b'l':
                # merge labels: local, other and (optionally) base
                labels = record.split(b'\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.writenoi18n(b'labels:\n')
                ui.write((b'  local: %s\n' % labels[0]))
                ui.write((b'  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((b'  base: %s\n' % labels[2]))
            else:
                ui.writenoi18n(
                    b'unrecognized entry: %s\t%s\n'
                    % (rtype, record.replace(b'\0', b'\t'))
                )

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = b'LOml'

    def key(r):
        # known record types sort first, in 'LOml' order; unknown types
        # sort after them by their record payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)

    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.writenoi18n(b'no merge state found\n')
    elif not v2records:
        ui.notenoi18n(b'no version 2 merge state\n')
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.notenoi18n(b'v1 and v2 states match: using v2\n')
        printrecords(2)
    else:
        ui.notenoi18n(b'v1 and v2 states mismatch: using v1\n')
        printrecords(1)
        if ui.verbose:
            printrecords(2)
2051 2051
2052 2052
@command(b'debugnamecomplete', [], _(b'NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # namespace specially (below) and take every other namespace wholesale
    for nsname, ns in pycompat.iteritems(repo.names):
        if nsname != b'branches':
            names.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            names.add(tag)
    if not args:
        args = [b'']
    completions = set()
    for prefix in args:
        completions.update(n for n in names if n.startswith(prefix))
    ui.write(b'\n'.join(sorted(completions)))
    ui.write(b'\n')
2075 2075
2076 2076
@command(
    b'debugobsolete',
    [
        (b'', b'flags', 0, _(b'markers flag')),
        (
            b'',
            b'record-parents',
            False,
            _(b'record parent information for the precursor'),
        ),
        (b'r', b'rev', [], _(b'display markers relevant to REV')),
        (
            b'',
            b'exclusive',
            False,
            _(b'restrict display to markers only relevant to REV'),
        ),
        (b'', b'index', False, _(b'display index of the marker')),
        (b'', b'delete', [], _(b'delete markers specified by indices')),
    ]
    + cmdutil.commitopts2
    + cmdutil.formatteropts,
    _(b'[OBSOLETED [REPLACEMENT ...]]'),
)
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # parse a full hexadecimal node id; the node need not exist locally
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort(
                b'changeset references must be full hexadecimal '
                b'node identifiers'
            )

    # --delete mode: remove markers by index and return early
    if opts.get(b'delete'):
        indices = []
        for v in opts.get(b'delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(
                    _(b'invalid index value: %r') % v,
                    hint=_(b'use integers for indices'),
                )

        # deleting markers rewrites the obsstore, which cannot be done
        # safely while a transaction is open
        if repo.currenttransaction():
            raise error.Abort(
                _(b'cannot delete obsmarkers in the middle of transaction.')
            )

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_(b'deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker obsoleting `precursor` in favor
        # of the given successors (possibly none, i.e. pruned)
        if opts[b'rev']:
            raise error.Abort(b'cannot select revision when creating marker')
        metadata = {}
        metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction(b'debugobsolete')
            try:
                date = opts.get(b'date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts[b'record_parents']:
                    # parent data can only come from a changeset we have
                    if prec not in repo.unfiltered():
                        raise error.Abort(
                            b'cannot used --record-parents on '
                            b'unknown changesets'
                        )
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(
                    tr,
                    prec,
                    succs,
                    opts[b'flags'],
                    parents=parents,
                    date=date,
                    metadata=metadata,
                    ui=ui,
                )
                tr.close()
            except ValueError as exc:
                raise error.Abort(
                    _(b'bad obsmarker input: %s') % pycompat.bytestr(exc)
                )
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts[b'rev']:
            revs = scmutil.revrange(repo, opts[b'rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(
                obsutil.getmarkers(
                    repo, nodes=nodes, exclusive=opts[b'exclusive']
                )
            )
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get(b'rev') and opts.get(b'index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter(b'debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get(b'index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
2224 2224
2225 2225
@command(
    b'debugp1copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    # resolve the target revision (defaults to the working context)
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    copymap = ctx.p1copies()
    for dst in copymap:
        ui.write(b'%s -> %s\n' % (copymap[dst], dst))
2238 2238
2239 2239
@command(
    b'debugp2copies',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV]'),
)
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # This function was previously (mis)named debugp1copies, which silently
    # shadowed the real debugp1copies defined above at module level; the
    # command registration (via the decorator) was unaffected, but the module
    # attribute pointed at the wrong implementation. Renamed to match the
    # registered command name.
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'), default=None)
    for dst, src in ctx.p2copies().items():
        ui.write(b'%s -> %s\n' % (src, dst))
2252 2252
2253 2253
@command(
    b'debugpathcomplete',
    [
        (b'f', b'full', None, _(b'complete an entire path')),
        (b'n', b'normal', None, _(b'show only normal files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
    ],
    _(b'FILESPEC...'),
)
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, restricted to
        # dirstate entries whose status character is in `acceptable`.
        dirstate = repo.dirstate
        # normalize the spec to a repo-relative path; anything outside the
        # repository root cannot be completed
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += b'/'
        spec = spec[len(rootdir) :]
        # the dirstate stores '/'-separated paths; convert back and forth
        # on platforms with a different separator
        fixpaths = pycompat.ossep != b'/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, b'/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in pycompat.iteritems(dirstate):
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace(b'/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator so only
                # one additional segment is completed
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate status characters from the
    # -n/-a/-r flags; empty means "no filter" (handled below)
    acceptable = b''
    if opts['normal']:
        acceptable += b'nm'
    if opts['added']:
        acceptable += b'a'
    if opts['removed']:
        acceptable += b'r'
    cwd = repo.getcwd()
    if not specs:
        specs = [b'.']

    files, dirs = set(), set()
    for spec in specs:
        # with no status filter, accept all of normal/modified/added/removed
        f, d = complete(spec, acceptable or b'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write(b'\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write(b'\n')
2322 2322
2323 2323
@command(
    b'debugpathcopies',
    cmdutil.walkopts,
    b'hg debugpathcopies REV1 REV2 [FILE]',
    inferrepo=True,
)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    # resolve both endpoints and restrict the copy trace to the patterns
    srcctx = scmutil.revsingle(repo, rev1)
    dstctx = scmutil.revsingle(repo, rev2)
    matcher = scmutil.match(srcctx, pats, opts)
    copymap = copies.pathcopies(srcctx, dstctx, matcher)
    for dst, src in sorted(copymap.items()):
        ui.write(b'%s -> %s\n' % (src, dst))
2337 2337
2338 2338
@command(b'debugpeer', [], _(b'PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        (b'devel', b'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_(b'url: %s\n') % peer.url())
        ui.write(_(b'local: %s\n') % (_(b'yes') if islocal else _(b'no')))
        ui.write(
            _(b'pushable: %s\n') % (_(b'yes') if pushable else _(b'no'))
        )
2357 2357
2358 2358
@command(
    b'debugpickmergetool',
    [
        (b'r', b'rev', b'', _(b'check for files in this revision'), _(b'REV')),
        (b'', b'changedelete', None, _(b'emulate merging change and delete')),
    ]
    + cmdutil.walkopts
    + cmdutil.mergetoolopts,
    _(b'[PATTERN]...'),
    inferrepo=True,
)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is applied through a config override so that the normal tool
    # selection machinery sees it the same way a real merge would
    overrides = {}
    if opts[b'tool']:
        overrides[(b'ui', b'forcemerge')] = opts[b'tool']
        ui.notenoi18n(b'with --tool %r\n' % (pycompat.bytestr(opts[b'tool'])))

    with ui.configoverride(overrides, b'debugmergepatterns'):
        hgmerge = encoding.environ.get(b"HGMERGE")
        if hgmerge is not None:
            ui.notenoi18n(b'with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config(b"ui", b"merge")
        if uimerge:
            ui.notenoi18n(b'with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get(b'rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts[b'changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # unless --debug is set, swallow the chatter _picktool emits
                # while matching; the try/finally keeps push/pop balanced
                # even when _picktool raises
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(
                    repo,
                    ui,
                    path,
                    fctx.isbinary(),
                    b'l' in fctx.flags(),
                    changedelete,
                )
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(b'%s = %s\n' % (path, tool))
2446 2446
2447 2447
@command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        # set mode: conditionally update KEY from OLD to NEW on the peer
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand(
                b'pushkey',
                {
                    b'namespace': namespace,
                    b'key': key,
                    b'old': old,
                    b'new': new,
                },
            ).result()

        ui.status(pycompat.bytestr(r) + b'\n')
        # exit status 0 on success (truthy result), 1 on failure
        return not r
    else:
        # list mode: dump every key/value pair in the namespace, escaped
        # so binary values remain printable
        for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
            ui.write(
                b"%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
            )
2479 2479
2480 2480
@command(b'debugpvec', [], _(b'A B'))
def debugpvec(ui, repo, a, b=None):
    """display parent-vector (pvec) comparison data for two revisions

    Prints both vectors, their depths, and the relation between them:
    ``=`` (equal), ``>``/``<`` (ancestor ordering), or ``|``
    (incomparable).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = b"="
    elif pa > pb:
        rel = b">"
    elif pa < pb:
        rel = b"<"
    elif pa | pb:
        rel = b"|"
    else:
        # Defensive: the comparisons above are expected to be exhaustive,
        # but the previous code left `rel` unbound when none matched,
        # which would raise NameError below instead of producing output.
        rel = b"?"
    ui.write(_(b"a: %s\n") % pa)
    ui.write(_(b"b: %s\n") % pb)
    ui.write(_(b"depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(
        _(b"delta: %d hdist: %d distance: %d relation: %s\n")
        % (
            abs(pa._depth - pb._depth),
            pvec._hamming(pa._vec, pb._vec),
            pa.distance(pb),
            rel,
        )
    )
2507 2507
2508 2508
@command(
    b'debugrebuilddirstate|debugrebuildstate',
    [
        (b'r', b'rev', b'', _(b'revision to rebuild to'), _(b'REV')),
        (
            b'',
            b'minimal',
            None,
            _(
                b'only rebuild files that are inconsistent with '
                b'the working copy parent'
            ),
        ),
    ],
    _(b'[-r REV]'),
)
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None means "rebuild everything"
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # in the manifest but untracked in the dirstate
            manifestonly = manifestfiles - dirstatefiles
            # tracked in the dirstate but absent from the manifest; entries
            # in the 'added' state are intentionally left alone
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != b'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2556 2556
2557 2557
@command(b'debugrebuildfncache', [], b'')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # all of the actual work happens in the repair module
    repair.rebuildfncache(ui, repo)
2562 2562
2563 2563
@command(
    b'debugrename',
    [(b'r', b'rev', b'', _(b'revision to debug'), _(b'REV'))],
    _(b'[-r REV] [FILE]...'),
)
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get(b'rev'))
    matcher = scmutil.match(ctx, pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() returns (source path, source node) or a falsy value
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = repo.pathto(abspath)
        if renamed:
            ui.write(
                _(b"%s renamed from %s:%s\n")
                % (relpath, renamed[0], hex(renamed[1]))
            )
        else:
            ui.write(_(b"%s not renamed\n") % relpath)
2583 2583
2584 2584
2585 2585 @command(
2586 2586 b'debugrevlog',
2587 2587 cmdutil.debugrevlogopts + [(b'd', b'dump', False, _(b'dump index data'))],
2588 2588 _(b'-c|-m|FILE'),
2589 2589 optionalrepo=True,
2590 2590 )
2591 2591 def debugrevlog(ui, repo, file_=None, **opts):
2592 2592 """show data and statistics about a revlog"""
2593 2593 opts = pycompat.byteskwargs(opts)
2594 2594 r = cmdutil.openrevlog(repo, b'debugrevlog', file_, opts)
2595 2595
2596 2596 if opts.get(b"dump"):
2597 2597 numrevs = len(r)
2598 2598 ui.write(
2599 2599 (
2600 2600 b"# rev p1rev p2rev start end deltastart base p1 p2"
2601 2601 b" rawsize totalsize compression heads chainlen\n"
2602 2602 )
2603 2603 )
2604 2604 ts = 0
2605 2605 heads = set()
2606 2606
2607 2607 for rev in pycompat.xrange(numrevs):
2608 2608 dbase = r.deltaparent(rev)
2609 2609 if dbase == -1:
2610 2610 dbase = rev
2611 2611 cbase = r.chainbase(rev)
2612 2612 clen = r.chainlen(rev)
2613 2613 p1, p2 = r.parentrevs(rev)
2614 2614 rs = r.rawsize(rev)
2615 2615 ts = ts + rs
2616 2616 heads -= set(r.parentrevs(rev))
2617 2617 heads.add(rev)
2618 2618 try:
2619 2619 compression = ts / r.end(rev)
2620 2620 except ZeroDivisionError:
2621 2621 compression = 0
2622 2622 ui.write(
2623 2623 b"%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2624 2624 b"%11d %5d %8d\n"
2625 2625 % (
2626 2626 rev,
2627 2627 p1,
2628 2628 p2,
2629 2629 r.start(rev),
2630 2630 r.end(rev),
2631 2631 r.start(dbase),
2632 2632 r.start(cbase),
2633 2633 r.start(p1),
2634 2634 r.start(p2),
2635 2635 rs,
2636 2636 ts,
2637 2637 compression,
2638 2638 len(heads),
2639 2639 clen,
2640 2640 )
2641 2641 )
2642 2642 return 0
2643 2643
2644 2644 v = r.version
2645 2645 format = v & 0xFFFF
2646 2646 flags = []
2647 2647 gdelta = False
2648 2648 if v & revlog.FLAG_INLINE_DATA:
2649 2649 flags.append(b'inline')
2650 2650 if v & revlog.FLAG_GENERALDELTA:
2651 2651 gdelta = True
2652 2652 flags.append(b'generaldelta')
2653 2653 if not flags:
2654 2654 flags = [b'(none)']
2655 2655
2656 2656 ### tracks merge vs single parent
2657 2657 nummerges = 0
2658 2658
2659 2659 ### tracks ways the "delta" are build
2660 2660 # nodelta
2661 2661 numempty = 0
2662 2662 numemptytext = 0
2663 2663 numemptydelta = 0
2664 2664 # full file content
2665 2665 numfull = 0
2666 2666 # intermediate snapshot against a prior snapshot
2667 2667 numsemi = 0
2668 2668 # snapshot count per depth
2669 2669 numsnapdepth = collections.defaultdict(lambda: 0)
2670 2670 # delta against previous revision
2671 2671 numprev = 0
2672 2672 # delta against first or second parent (not prev)
2673 2673 nump1 = 0
2674 2674 nump2 = 0
2675 2675 # delta against neither prev nor parents
2676 2676 numother = 0
2677 2677 # delta against prev that are also first or second parent
2678 2678 # (details of `numprev`)
2679 2679 nump1prev = 0
2680 2680 nump2prev = 0
2681 2681
2682 2682 # data about delta chain of each revs
2683 2683 chainlengths = []
2684 2684 chainbases = []
2685 2685 chainspans = []
2686 2686
2687 2687 # data about each revision
2688 2688 datasize = [None, 0, 0]
2689 2689 fullsize = [None, 0, 0]
2690 2690 semisize = [None, 0, 0]
2691 2691 # snapshot count per depth
2692 2692 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2693 2693 deltasize = [None, 0, 0]
2694 2694 chunktypecounts = {}
2695 2695 chunktypesizes = {}
2696 2696
2697 2697 def addsize(size, l):
2698 2698 if l[0] is None or size < l[0]:
2699 2699 l[0] = size
2700 2700 if size > l[1]:
2701 2701 l[1] = size
2702 2702 l[2] += size
2703 2703
2704 2704 numrevs = len(r)
2705 2705 for rev in pycompat.xrange(numrevs):
2706 2706 p1, p2 = r.parentrevs(rev)
2707 2707 delta = r.deltaparent(rev)
2708 2708 if format > 0:
2709 2709 addsize(r.rawsize(rev), datasize)
2710 2710 if p2 != nullrev:
2711 2711 nummerges += 1
2712 2712 size = r.length(rev)
2713 2713 if delta == nullrev:
2714 2714 chainlengths.append(0)
2715 2715 chainbases.append(r.start(rev))
2716 2716 chainspans.append(size)
2717 2717 if size == 0:
2718 2718 numempty += 1
2719 2719 numemptytext += 1
2720 2720 else:
2721 2721 numfull += 1
2722 2722 numsnapdepth[0] += 1
2723 2723 addsize(size, fullsize)
2724 2724 addsize(size, snapsizedepth[0])
2725 2725 else:
2726 2726 chainlengths.append(chainlengths[delta] + 1)
2727 2727 baseaddr = chainbases[delta]
2728 2728 revaddr = r.start(rev)
2729 2729 chainbases.append(baseaddr)
2730 2730 chainspans.append((revaddr - baseaddr) + size)
2731 2731 if size == 0:
2732 2732 numempty += 1
2733 2733 numemptydelta += 1
2734 2734 elif r.issnapshot(rev):
2735 2735 addsize(size, semisize)
2736 2736 numsemi += 1
2737 2737 depth = r.snapshotdepth(rev)
2738 2738 numsnapdepth[depth] += 1
2739 2739 addsize(size, snapsizedepth[depth])
2740 2740 else:
2741 2741 addsize(size, deltasize)
2742 2742 if delta == rev - 1:
2743 2743 numprev += 1
2744 2744 if delta == p1:
2745 2745 nump1prev += 1
2746 2746 elif delta == p2:
2747 2747 nump2prev += 1
2748 2748 elif delta == p1:
2749 2749 nump1 += 1
2750 2750 elif delta == p2:
2751 2751 nump2 += 1
2752 2752 elif delta != nullrev:
2753 2753 numother += 1
2754 2754
2755 2755 # Obtain data on the raw chunks in the revlog.
2756 2756 if util.safehasattr(r, b'_getsegmentforrevs'):
2757 2757 segment = r._getsegmentforrevs(rev, rev)[1]
2758 2758 else:
2759 2759 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2760 2760 if segment:
2761 2761 chunktype = bytes(segment[0:1])
2762 2762 else:
2763 2763 chunktype = b'empty'
2764 2764
2765 2765 if chunktype not in chunktypecounts:
2766 2766 chunktypecounts[chunktype] = 0
2767 2767 chunktypesizes[chunktype] = 0
2768 2768
2769 2769 chunktypecounts[chunktype] += 1
2770 2770 chunktypesizes[chunktype] += size
2771 2771
2772 2772 # Adjust size min value for empty cases
2773 2773 for size in (datasize, fullsize, semisize, deltasize):
2774 2774 if size[0] is None:
2775 2775 size[0] = 0
2776 2776
2777 2777 numdeltas = numrevs - numfull - numempty - numsemi
2778 2778 numoprev = numprev - nump1prev - nump2prev
2779 2779 totalrawsize = datasize[2]
2780 2780 datasize[2] /= numrevs
2781 2781 fulltotal = fullsize[2]
2782 2782 if numfull == 0:
2783 2783 fullsize[2] = 0
2784 2784 else:
2785 2785 fullsize[2] /= numfull
2786 2786 semitotal = semisize[2]
2787 2787 snaptotal = {}
2788 2788 if numsemi > 0:
2789 2789 semisize[2] /= numsemi
2790 2790 for depth in snapsizedepth:
2791 2791 snaptotal[depth] = snapsizedepth[depth][2]
2792 2792 snapsizedepth[depth][2] /= numsnapdepth[depth]
2793 2793
2794 2794 deltatotal = deltasize[2]
2795 2795 if numdeltas > 0:
2796 2796 deltasize[2] /= numdeltas
2797 2797 totalsize = fulltotal + semitotal + deltatotal
2798 2798 avgchainlen = sum(chainlengths) / numrevs
2799 2799 maxchainlen = max(chainlengths)
2800 2800 maxchainspan = max(chainspans)
2801 2801 compratio = 1
2802 2802 if totalsize:
2803 2803 compratio = totalrawsize / totalsize
2804 2804
2805 2805 basedfmtstr = b'%%%dd\n'
2806 2806 basepcfmtstr = b'%%%dd %s(%%5.2f%%%%)\n'
2807 2807
2808 2808 def dfmtstr(max):
2809 2809 return basedfmtstr % len(str(max))
2810 2810
2811 2811 def pcfmtstr(max, padding=0):
2812 2812 return basepcfmtstr % (len(str(max)), b' ' * padding)
2813 2813
2814 2814 def pcfmt(value, total):
2815 2815 if total:
2816 2816 return (value, 100 * float(value) / total)
2817 2817 else:
2818 2818 return value, 100.0
2819 2819
2820 2820 ui.writenoi18n(b'format : %d\n' % format)
2821 2821 ui.writenoi18n(b'flags : %s\n' % b', '.join(flags))
2822 2822
2823 2823 ui.write(b'\n')
2824 2824 fmt = pcfmtstr(totalsize)
2825 2825 fmt2 = dfmtstr(totalsize)
2826 2826 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2827 2827 ui.writenoi18n(b' merges : ' + fmt % pcfmt(nummerges, numrevs))
2828 2828 ui.writenoi18n(
2829 2829 b' normal : ' + fmt % pcfmt(numrevs - nummerges, numrevs)
2830 2830 )
2831 2831 ui.writenoi18n(b'revisions : ' + fmt2 % numrevs)
2832 2832 ui.writenoi18n(b' empty : ' + fmt % pcfmt(numempty, numrevs))
2833 2833 ui.writenoi18n(
2834 2834 b' text : '
2835 2835 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
2836 2836 )
2837 2837 ui.writenoi18n(
2838 2838 b' delta : '
2839 2839 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
2840 2840 )
2841 2841 ui.writenoi18n(
2842 2842 b' snapshot : ' + fmt % pcfmt(numfull + numsemi, numrevs)
2843 2843 )
2844 2844 for depth in sorted(numsnapdepth):
2845 2845 ui.write(
2846 2846 (b' lvl-%-3d : ' % depth)
2847 2847 + fmt % pcfmt(numsnapdepth[depth], numrevs)
2848 2848 )
2849 2849 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(numdeltas, numrevs))
2850 2850 ui.writenoi18n(b'revision size : ' + fmt2 % totalsize)
2851 2851 ui.writenoi18n(
2852 2852 b' snapshot : ' + fmt % pcfmt(fulltotal + semitotal, totalsize)
2853 2853 )
2854 2854 for depth in sorted(numsnapdepth):
2855 2855 ui.write(
2856 2856 (b' lvl-%-3d : ' % depth)
2857 2857 + fmt % pcfmt(snaptotal[depth], totalsize)
2858 2858 )
2859 2859 ui.writenoi18n(b' deltas : ' + fmt % pcfmt(deltatotal, totalsize))
2860 2860
2861 2861 def fmtchunktype(chunktype):
2862 2862 if chunktype == b'empty':
2863 2863 return b' %s : ' % chunktype
2864 2864 elif chunktype in pycompat.bytestr(string.ascii_letters):
2865 2865 return b' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2866 2866 else:
2867 2867 return b' 0x%s : ' % hex(chunktype)
2868 2868
2869 2869 ui.write(b'\n')
2870 2870 ui.writenoi18n(b'chunks : ' + fmt2 % numrevs)
2871 2871 for chunktype in sorted(chunktypecounts):
2872 2872 ui.write(fmtchunktype(chunktype))
2873 2873 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2874 2874 ui.writenoi18n(b'chunks size : ' + fmt2 % totalsize)
2875 2875 for chunktype in sorted(chunktypecounts):
2876 2876 ui.write(fmtchunktype(chunktype))
2877 2877 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2878 2878
2879 2879 ui.write(b'\n')
2880 2880 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2881 2881 ui.writenoi18n(b'avg chain length : ' + fmt % avgchainlen)
2882 2882 ui.writenoi18n(b'max chain length : ' + fmt % maxchainlen)
2883 2883 ui.writenoi18n(b'max chain reach : ' + fmt % maxchainspan)
2884 2884 ui.writenoi18n(b'compression ratio : ' + fmt % compratio)
2885 2885
2886 2886 if format > 0:
2887 2887 ui.write(b'\n')
2888 2888 ui.writenoi18n(
2889 2889 b'uncompressed data size (min/max/avg) : %d / %d / %d\n'
2890 2890 % tuple(datasize)
2891 2891 )
2892 2892 ui.writenoi18n(
2893 2893 b'full revision size (min/max/avg) : %d / %d / %d\n'
2894 2894 % tuple(fullsize)
2895 2895 )
2896 2896 ui.writenoi18n(
2897 2897 b'inter-snapshot size (min/max/avg) : %d / %d / %d\n'
2898 2898 % tuple(semisize)
2899 2899 )
2900 2900 for depth in sorted(snapsizedepth):
2901 2901 if depth == 0:
2902 2902 continue
2903 2903 ui.writenoi18n(
2904 2904 b' level-%-3d (min/max/avg) : %d / %d / %d\n'
2905 2905 % ((depth,) + tuple(snapsizedepth[depth]))
2906 2906 )
2907 2907 ui.writenoi18n(
2908 2908 b'delta size (min/max/avg) : %d / %d / %d\n'
2909 2909 % tuple(deltasize)
2910 2910 )
2911 2911
2912 2912 if numdeltas > 0:
2913 2913 ui.write(b'\n')
2914 2914 fmt = pcfmtstr(numdeltas)
2915 2915 fmt2 = pcfmtstr(numdeltas, 4)
2916 2916 ui.writenoi18n(
2917 2917 b'deltas against prev : ' + fmt % pcfmt(numprev, numdeltas)
2918 2918 )
2919 2919 if numprev > 0:
2920 2920 ui.writenoi18n(
2921 2921 b' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev)
2922 2922 )
2923 2923 ui.writenoi18n(
2924 2924 b' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev)
2925 2925 )
2926 2926 ui.writenoi18n(
2927 2927 b' other : ' + fmt2 % pcfmt(numoprev, numprev)
2928 2928 )
2929 2929 if gdelta:
2930 2930 ui.writenoi18n(
2931 2931 b'deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas)
2932 2932 )
2933 2933 ui.writenoi18n(
2934 2934 b'deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas)
2935 2935 )
2936 2936 ui.writenoi18n(
2937 2937 b'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
2938 2938 )
2939 2939
2940 2940
@command(
    b'debugrevlogindex',
    cmdutil.debugrevlogopts
    + [(b'f', b'format', 0, _(b'revlog format'), _(b'FORMAT'))],
    _(b'[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True,
)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, b'debugrevlogindex', file_, opts)
    # Only the two historical index layouts (v0 and v1) are supported.
    format = opts.get(b'format', 0)
    if format not in (0, 1):
        raise error.Abort(_(b"unknown format %d") % format)

    # Full-length hashes in debug mode, abbreviated ones otherwise.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Emit a column header matching the chosen format/verbosity; verbose
    # adds offset/length (format 0) or flag/offset/length/size (format 1).
    if format == 0:
        if ui.verbose:
            ui.writenoi18n(
                b"   rev    offset  length linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
        else:
            ui.writenoi18n(
                b"   rev linkrev %s %s p2\n"
                % (b"nodeid".ljust(idlen), b"p1".ljust(idlen))
            )
    elif format == 1:
        if ui.verbose:
            ui.writenoi18n(
                (
                    b"   rev flag   offset   length     size   link     p1"
                    b"     p2 %s\n"
                )
                % b"nodeid".rjust(idlen)
            )
        else:
            ui.writenoi18n(
                b"   rev flag     size   link     p1     p2 %s\n"
                % b"nodeid".rjust(idlen)
            )

    # One row per revision, mirroring the header layout above.
    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Parent lookup can fail on damaged data; fall back to null
                # parents so the dump still completes.
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write(
                    b"% 6d % 9d % 7d % 7d %s %s %s\n"
                    % (
                        i,
                        r.start(i),
                        r.length(i),
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
            else:
                ui.write(
                    b"% 6d % 7d %s %s %s\n"
                    % (
                        i,
                        r.linkrev(i),
                        shortfn(node),
                        shortfn(pp[0]),
                        shortfn(pp[1]),
                    )
                )
        elif format == 1:
            # Format 1 reports parents as revision numbers, not nodes.
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write(
                    b"% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.start(i),
                        r.length(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
            else:
                ui.write(
                    b"% 6d %04x % 8d % 6d % 6d % 6d %s\n"
                    % (
                        i,
                        r.flags(i),
                        r.rawsize(i),
                        r.linkrev(i),
                        pr[0],
                        pr[1],
                        shortfn(node),
                    )
                )
3054 3054
3055 3055
@command(
    b'debugrevspec',
    [
        (
            b'',
            b'optimize',
            None,
            _(b'print parsed tree after optimizing (DEPRECATED)'),
        ),
        (
            b'',
            b'show-revs',
            True,
            _(b'print list of result revisions (default)'),
        ),
        (
            b's',
            b'show-set',
            None,
            _(b'print internal representation of result set'),
        ),
        (
            b'p',
            b'show-stage',
            [],
            _(b'print parsed tree at the given stage'),
            _(b'NAME'),
        ),
        (b'', b'no-optimized', False, _(b'evaluate tree without optimization')),
        (b'', b'verify-optimized', False, _(b'verify optimized result')),
    ],
    b'REVSPEC',
)
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems(b'revsetalias')
    # The parsing pipeline, in order; each stage transforms the tree
    # produced by the previous one.
    stages = [
        (b'parsed', lambda tree: tree),
        (
            b'expanded',
            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
        ),
        (b'concatenated', revsetlang.foldconcat),
        (b'analyzed', revsetlang.analyze),
        (b'optimized', revsetlang.optimize),
    ]
    if opts[b'no_optimized']:
        stages = stages[:-1]
    if opts[b'verify_optimized'] and opts[b'no_optimized']:
        raise error.Abort(
            _(b'cannot use --verify-optimized with --no-optimized')
        )
    stagenames = set(n for n, f in stages)

    # Which stage trees to print: 'showalways' unconditionally,
    # 'showchanged' only when that stage actually altered the tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts[b'show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add(b'parsed')
        showchanged.update([b'expanded', b'concatenated'])
    if opts[b'optimize']:
        showalways.add(b'optimized')
    if opts[b'show_stage'] and opts[b'optimize']:
        raise error.Abort(_(b'cannot use --optimize with --show-stage'))
    if opts[b'show_stage'] == [b'all']:
        showalways.update(stagenames)
    else:
        for n in opts[b'show_stage']:
            if n not in stagenames:
                raise error.Abort(_(b'invalid stage name: %s') % n)
        showalways.update(opts[b'show_stage'])

    # Run the pipeline, remembering each stage's tree so the analyzed and
    # optimized results can be compared later by --verify-optimized.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts[b'show_stage'] or n != b'parsed':
                ui.write(b"* %s:\n" % n)
            ui.write(revsetlang.prettyformat(tree), b"\n")
            printedtree = tree

    if opts[b'verify_optimized']:
        # Evaluate both trees against the repo and print a unified-diff
        # style comparison of the resulting revision lists.
        arevs = revset.makematcher(treebystage[b'analyzed'])(repo)
        brevs = revset.makematcher(treebystage[b'optimized'])(repo)
        if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
            ui.writenoi18n(
                b"* analyzed set:\n", stringutil.prettyrepr(arevs), b"\n"
            )
            ui.writenoi18n(
                b"* optimized set:\n", stringutil.prettyrepr(brevs), b"\n"
            )
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.writenoi18n(b'--- analyzed\n', label=b'diff.file_a')
        ui.writenoi18n(b'+++ optimized\n', label=b'diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write(b'-%d\n' % c, label=b'diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write(b'+%d\n' % c, label=b'diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(b' %d\n' % c)
        return 1

    # Normal mode: evaluate the final tree and print the revisions.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts[b'show_set'] or (opts[b'show_set'] is None and ui.verbose):
        ui.writenoi18n(b"* set:\n", stringutil.prettyrepr(revs), b"\n")
    if not opts[b'show_revs']:
        return
    for c in revs:
        ui.write(b"%d\n" % c)
3187 3187
3188 3188
@command(
    b'debugserve',
    [
        (
            b'',
            b'sshstdio',
            False,
            _(b'run an SSH server bound to process handles'),
        ),
        (b'', b'logiofd', b'', _(b'file descriptor to log server I/O to')),
        (b'', b'logiofile', b'', _(b'file to log server I/O to')),
    ],
    b'',
)
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts[b'sshstdio']:
        raise error.Abort(_(b'only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are alternative destinations for the I/O
    # log; at most one may be given.
    if opts[b'logiofd'] and opts[b'logiofile']:
        raise error.Abort(_(b'cannot use both --logiofd and --logiofile'))

    if opts[b'logiofd']:
        # Ideally we would be line buffered. But line buffering in binary
        # mode isn't supported and emits a warning in Python 3.8+. Disabling
        # buffering could have performance impacts. But since this isn't
        # performance critical code, it should be fine.
        try:
            logfh = os.fdopen(int(opts[b'logiofd']), 'ab', 0)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts[b'logiofd']), 'wb', 0)
    elif opts[b'logiofile']:
        logfh = open(opts[b'logiofile'], b'ab', 0)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
3234 3237
3235 3238
@command(b'debugsetparents', [], _(b'REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions before taking the lock; the second parent
    # defaults to the null revision when not supplied.
    parents = (
        scmutil.revsingle(repo, rev1).node(),
        scmutil.revsingle(repo, rev2, b'null').node(),
    )

    with repo.wlock():
        repo.setparents(*parents)
3253 3256
3254 3257
@command(b'debugsidedata', cmdutil.debugrevlogopts, _(b'-c|-m|FILE REV'))
def debugsidedata(ui, repo, file_, rev=None, **opts):
    """dump the side data for a cl/manifest/file revision

    Use --verbose to dump the sidedata content."""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir there is no file argument, so the first positional
    # argument is actually the revision.
    if opts.get(b'changelog') or opts.get(b'manifest') or opts.get(b'dir'):
        if rev is not None:
            raise error.CommandError(b'debugdata', _(b'invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError(b'debugdata', _(b'invalid arguments'))
    r = cmdutil.openstorage(repo, b'debugdata', file_, opts)
    # Unwrap to the underlying revlog when the storage object wraps one.
    r = getattr(r, '_revlog', r)
    try:
        sidedata = r.sidedata(r.lookup(rev))
    except KeyError:
        raise error.Abort(_(b'invalid revision identifier %s') % rev)
    if sidedata:
        # Print entries sorted by key for deterministic output.
        sidedata = list(sidedata.items())
        sidedata.sort()
        ui.writenoi18n(b'%d sidedata entries\n' % len(sidedata))
        for key, value in sidedata:
            ui.writenoi18n(b' entry-%04o size %d\n' % (key, len(value)))
            if ui.verbose:
                ui.writenoi18n(b'  %s\n' % stringutil.pprint(value))
3281 3284
3282 3285
@command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(
            _(b'certificate chain building is only possible on Windows')
        )

    if not source:
        if not repo:
            raise error.Abort(
                _(
                    b"there is no Mercurial repository here, and no "
                    b"server specified"
                )
            )
        source = b"default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # Fill in the scheme's default port so a bare host URL works.
    defaultport = {b'https': 443, b'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_(b"malformed port number in URL"))
    else:
        raise error.Abort(_(b"only https and ssh connections are supported"))

    from . import win32

    # Verification is deliberately disabled (CERT_NONE): we only need the
    # peer's raw certificate so Windows can (re)build its chain below.
    s = ssl.wrap_socket(
        socket.socket(),
        ssl_version=ssl.PROTOCOL_TLS,
        cert_reqs=ssl.CERT_NONE,
        ca_certs=None,
    )

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_(b'checking the certificate chain for %s\n') % url.host)

        # First probe without building; only ask Windows to fetch missing
        # intermediates when the chain turns out to be incomplete.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_(b'certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_(b'failed.\n'))
            else:
                ui.status(_(b'done.\n'))
        else:
            ui.status(_(b'full certificate chain is available\n'))
    finally:
        s.close()
3352 3355
3353 3356
@command(
    b'debugsub',
    [(b'r', b'rev', b'', _(b'revision to check'), _(b'REV'))],
    _(b'[-r REV] [REV]'),
)
def debugsub(ui, repo, rev=None):
    # Dump the substate entries (path, source, revision) recorded in the
    # given revision (or the working directory), sorted by subrepo path.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.writenoi18n(b'path %s\n' % path)
        ui.writenoi18n(b' source   %s\n' % state[0])
        ui.writenoi18n(b' revision %s\n' % state[1])
3365 3368
3366 3369
@command(
    b'debugsuccessorssets',
    [(b'', b'closest', False, _(b'return closest successors sets only'))],
    _(b'[REV]'),
)
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes  # renders a changectx for the header line
    node2str = short  # renders a successor node (abbreviated hex)
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write(b'%s\n' % ctx2str(ctx))
        # One indented line per successors set; multiple lines mean the
        # changeset is divergent, multiple nodes on a line mean a split.
        for succsset in obsutil.successorssets(
            repo, ctx.node(), closest=opts['closest'], cache=cache
        ):
            if succsset:
                ui.write(b'    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(b' ')
                    ui.write(node2str(node))
            ui.write(b'\n')
3421 3424
3422 3425
@command(
    b'debugtemplate',
    [
        (b'r', b'rev', [], _(b'apply template on changesets'), _(b'REV')),
        (b'D', b'define', [], _(b'define template keyword'), _(b'KEY=VALUE')),
    ],
    _(b'[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True,
)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(
                _(b'there is no Mercurial repository here (.hg not found)')
            )
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    # 'ui' is reserved since it would shadow the built-in resource.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split(b'=', 1))
            if not k or k == b'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_(b'malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parsed tree, and the alias-expanded tree if expansion
        # changed anything.
        aliases = ui.configitems(b'templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), b'\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.notenoi18n(
                b"* expanded:\n", templater.prettyformat(newtree), b'\n'
            )

    if revs is None:
        # Generic template: render once with the default (no-changeset)
        # resources.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.writenoi18n(b"* keywords: %s\n" % b', '.join(sorted(kwds)))
            ui.writenoi18n(b"* functions: %s\n" % b', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
3486 3489
3487 3490
@command(
    b'debuguigetpass',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguigetpass(ui, prompt=b''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Fix typo ('respose') and match debuguiprompt's 'response:' output.
    ui.writenoi18n(b'response: %s\n' % r)
3498 3501
3499 3502
@command(
    b'debuguiprompt',
    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
    _(b'[-p TEXT]'),
    norepo=True,
)
def debuguiprompt(ui, prompt=b''):
    """show plain prompt"""
    # Echo whatever the prompt machinery returned so tests can inspect it.
    answer = ui.prompt(prompt)
    ui.writenoi18n(b'response: %s\n' % answer)
3510 3513
3511 3514
@command(b'debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Cache updates require both the working-copy lock and the store lock.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
3517 3520
3518 3521
@command(
    b'debugupgraderepo',
    [
        (
            b'o',
            b'optimize',
            [],
            _(b'extra optimization to perform'),
            _(b'NAME'),
        ),
        (b'', b'run', False, _(b'performs an upgrade')),
        (b'', b'backup', True, _(b'keep the old repository content around')),
        (b'', b'changelog', None, _(b'select the changelog for upgrade')),
        (b'', b'manifest', None, _(b'select the manifest for upgrade')),
    ],
)
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.

    By default, all revlog will be upgraded. You can restrict this using flag
    such as `--manifest`:

    * `--manifest`: only optimize the manifest
    * `--no-manifest`: optimize all revlog but the manifest
    * `--changelog`: optimize the changelog only
    * `--no-changelog --no-manifest`: optimize filelogs only
    """
    # Thin CLI wrapper: all of the actual logic lives in the upgrade module.
    return upgrade.upgraderepo(
        ui,
        repo,
        run=run,
        optimize=optimize,
        backup=backup,
        **opts
    )
3565 3568
3566 3569
@command(
    b'debugwalk', cmdutil.walkopts, _(b'[OPTION]... [FILE]...'), inferrepo=True
)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.writenoi18n(b'* matcher:\n', stringutil.prettyrepr(m), b'\n')
    items = list(repo[None].walk(m))
    if not items:
        return

    # With ui.slash set on a platform whose native separator isn't '/',
    # normalize displayed paths. Hoisted out of the loop; this was
    # previously a per-call lambda assignment (avoids shadowing abs(), too).
    normalize = ui.configbool(b'ui', b'slash') and pycompat.ossep != b'/'

    def display(fn):
        # One-line helper: path as shown to the user.
        return util.normpath(fn) if normalize else fn

    # Column widths are sized to the longest repo-relative and cwd-relative
    # paths so the output lines up.
    fmt = b'f  %%-%ds  %%-%ds  %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items),
    )
    for path in items:
        line = fmt % (
            path,
            display(repo.pathto(path)),
            b'exact' if m.exact(path) else b'',
        )
        ui.write(b"%s\n" % line.rstrip())
3593 3596
3594 3597
@command(b'debugwhyunstable', [], _(b'REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = b''
        if entry.get(b'divergentnodes'):
            # Render each divergent node as "hex (phase)", space separated,
            # with a trailing space so it abuts the reason cleanly.
            dnodes = (
                b' '.join(
                    b'%s (%s)' % (ctx.hex(), ctx.phasestr())
                    for ctx in entry[b'divergentnodes']
                )
                + b' '
            )
        ui.write(
            b'%s: %s%s %s\n'
            % (entry[b'instability'], dnodes, entry[b'reason'], entry[b'node'])
        )
3612 3615
3613 3616
@command(
    b'debugwireargs',
    [
        (b'', b'three', b'', b'three'),
        (b'', b'four', b'', b'four'),
        (b'', b'five', b'', b'five'),
    ]
    + cmdutil.remoteopts,
    _(b'REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True,
)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # Strip the generic remote options; only command-specific, non-empty
    # options are forwarded over the wire.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    args = pycompat.strkwargs(
        {k: v for k, v in pycompat.iteritems(opts) if v}
    )
    # run twice to check that we don't mess up the stream for the next command
    first = peer.debugwireargs(*vals, **args)
    second = peer.debugwireargs(*vals, **args)
    ui.write(b"%s\n" % first)
    if first != second:
        ui.warn(b"%s\n" % second)
3641 3644
3642 3645
3643 3646 def _parsewirelangblocks(fh):
3644 3647 activeaction = None
3645 3648 blocklines = []
3646 3649 lastindent = 0
3647 3650
3648 3651 for line in fh:
3649 3652 line = line.rstrip()
3650 3653 if not line:
3651 3654 continue
3652 3655
3653 3656 if line.startswith(b'#'):
3654 3657 continue
3655 3658
3656 3659 if not line.startswith(b' '):
3657 3660 # New block. Flush previous one.
3658 3661 if activeaction:
3659 3662 yield activeaction, blocklines
3660 3663
3661 3664 activeaction = line
3662 3665 blocklines = []
3663 3666 lastindent = 0
3664 3667 continue
3665 3668
3666 3669 # Else we start with an indent.
3667 3670
3668 3671 if not activeaction:
3669 3672 raise error.Abort(_(b'indented line outside of block'))
3670 3673
3671 3674 indent = len(line) - len(line.lstrip())
3672 3675
3673 3676 # If this line is indented more than the last line, concatenate it.
3674 3677 if indent > lastindent and blocklines:
3675 3678 blocklines[-1] += line.lstrip()
3676 3679 else:
3677 3680 blocklines.append(line)
3678 3681 lastindent = indent
3679 3682
3680 3683 # Flush last block.
3681 3684 if activeaction:
3682 3685 yield activeaction, blocklines
3683 3686
3684 3687
3685 3688 @command(
3686 3689 b'debugwireproto',
3687 3690 [
3688 3691 (b'', b'localssh', False, _(b'start an SSH server for this repo')),
3689 3692 (b'', b'peer', b'', _(b'construct a specific version of the peer')),
3690 3693 (
3691 3694 b'',
3692 3695 b'noreadstderr',
3693 3696 False,
3694 3697 _(b'do not read from stderr of the remote'),
3695 3698 ),
3696 3699 (
3697 3700 b'',
3698 3701 b'nologhandshake',
3699 3702 False,
3700 3703 _(b'do not log I/O related to the peer handshake'),
3701 3704 ),
3702 3705 ]
3703 3706 + cmdutil.remoteopts,
3704 3707 _(b'[PATH]'),
3705 3708 optionalrepo=True,
3706 3709 )
3707 3710 def debugwireproto(ui, repo, path=None, **opts):
3708 3711 """send wire protocol commands to a server
3709 3712
3710 3713 This command can be used to issue wire protocol commands to remote
3711 3714 peers and to debug the raw data being exchanged.
3712 3715
3713 3716 ``--localssh`` will start an SSH server against the current repository
3714 3717 and connect to that. By default, the connection will perform a handshake
3715 3718 and establish an appropriate peer instance.
3716 3719
3717 3720 ``--peer`` can be used to bypass the handshake protocol and construct a
3718 3721 peer instance using the specified class type. Valid values are ``raw``,
3719 3722 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
3720 3723 raw data payloads and don't support higher-level command actions.
3721 3724
3722 3725 ``--noreadstderr`` can be used to disable automatic reading from stderr
3723 3726 of the peer (for SSH connections only). Disabling automatic reading of
3724 3727 stderr is useful for making output more deterministic.
3725 3728
3726 3729 Commands are issued via a mini language which is specified via stdin.
3727 3730 The language consists of individual actions to perform. An action is
3728 3731 defined by a block. A block is defined as a line with no leading
3729 3732 space followed by 0 or more lines with leading space. Blocks are
3730 3733 effectively a high-level command with additional metadata.
3731 3734
3732 3735 Lines beginning with ``#`` are ignored.
3733 3736
3734 3737 The following sections denote available actions.
3735 3738
3736 3739 raw
3737 3740 ---
3738 3741
3739 3742 Send raw data to the server.
3740 3743
3741 3744 The block payload contains the raw data to send as one atomic send
3742 3745 operation. The data may not actually be delivered in a single system
3743 3746 call: it depends on the abilities of the transport being used.
3744 3747
3745 3748 Each line in the block is de-indented and concatenated. Then, that
3746 3749 value is evaluated as a Python b'' literal. This allows the use of
3747 3750 backslash escaping, etc.
3748 3751
3749 3752 raw+
3750 3753 ----
3751 3754
3752 3755 Behaves like ``raw`` except flushes output afterwards.
3753 3756
3754 3757 command <X>
3755 3758 -----------
3756 3759
3757 3760 Send a request to run a named command, whose name follows the ``command``
3758 3761 string.
3759 3762
3760 3763 Arguments to the command are defined as lines in this block. The format of
3761 3764 each line is ``<key> <value>``. e.g.::
3762 3765
3763 3766 command listkeys
3764 3767 namespace bookmarks
3765 3768
3766 3769 If the value begins with ``eval:``, it will be interpreted as a Python
3767 3770 literal expression. Otherwise values are interpreted as Python b'' literals.
3768 3771 This allows sending complex types and encoding special byte sequences via
3769 3772 backslash escaping.
3770 3773
3771 3774 The following arguments have special meaning:
3772 3775
3773 3776 ``PUSHFILE``
3774 3777 When defined, the *push* mechanism of the peer will be used instead
3775 3778 of the static request-response mechanism and the content of the
3776 3779 file specified in the value of this argument will be sent as the
3777 3780 command payload.
3778 3781
3779 3782 This can be used to submit a local bundle file to the remote.
3780 3783
3781 3784 batchbegin
3782 3785 ----------
3783 3786
3784 3787 Instruct the peer to begin a batched send.
3785 3788
3786 3789 All ``command`` blocks are queued for execution until the next
3787 3790 ``batchsubmit`` block.
3788 3791
3789 3792 batchsubmit
3790 3793 -----------
3791 3794
3792 3795 Submit previously queued ``command`` blocks as a batch request.
3793 3796
3794 3797 This action MUST be paired with a ``batchbegin`` action.
3795 3798
3796 3799 httprequest <method> <path>
3797 3800 ---------------------------
3798 3801
3799 3802 (HTTP peer only)
3800 3803
3801 3804 Send an HTTP request to the peer.
3802 3805
3803 3806 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3804 3807
3805 3808 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3806 3809 headers to add to the request. e.g. ``Accept: foo``.
3807 3810
3808 3811 The following arguments are special:
3809 3812
3810 3813 ``BODYFILE``
3811 3814 The content of the file defined as the value to this argument will be
3812 3815 transferred verbatim as the HTTP request body.
3813 3816
3814 3817 ``frame <type> <flags> <payload>``
3815 3818 Send a unified protocol frame as part of the request body.
3816 3819
3817 3820 All frames will be collected and sent as the body to the HTTP
3818 3821 request.
3819 3822
3820 3823 close
3821 3824 -----
3822 3825
3823 3826 Close the connection to the server.
3824 3827
3825 3828 flush
3826 3829 -----
3827 3830
3828 3831 Flush data written to the server.
3829 3832
3830 3833 readavailable
3831 3834 -------------
3832 3835
3833 3836 Close the write end of the connection and read all available data from
3834 3837 the server.
3835 3838
3836 3839 If the connection to the server encompasses multiple pipes, we poll both
3837 3840 pipes and read available data.
3838 3841
3839 3842 readline
3840 3843 --------
3841 3844
3842 3845 Read a line of output from the server. If there are multiple output
3843 3846 pipes, reads only the main pipe.
3844 3847
3845 3848 ereadline
3846 3849 ---------
3847 3850
3848 3851 Like ``readline``, but read from the stderr pipe, if available.
3849 3852
3850 3853 read <X>
3851 3854 --------
3852 3855
3853 3856 ``read()`` N bytes from the server's main output pipe.
3854 3857
3855 3858 eread <X>
3856 3859 ---------
3857 3860
3858 3861 ``read()`` N bytes from the server's stderr pipe, if available.
3859 3862
3860 3863 Specifying Unified Frame-Based Protocol Frames
3861 3864 ----------------------------------------------
3862 3865
3863 3866 It is possible to emit a *Unified Frame-Based Protocol* by using special
3864 3867 syntax.
3865 3868
3866 3869 A frame is composed as a type, flags, and payload. These can be parsed
3867 3870 from a string of the form:
3868 3871
3869 3872 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3870 3873
3871 3874 ``request-id`` and ``stream-id`` are integers defining the request and
3872 3875 stream identifiers.
3873 3876
3874 3877 ``type`` can be an integer value for the frame type or the string name
3875 3878 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3876 3879 ``command-name``.
3877 3880
3878 3881 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3879 3882 components. Each component (and there can be just one) can be an integer
3880 3883 or a flag name for stream flags or frame flags, respectively. Values are
3881 3884 resolved to integers and then bitwise OR'd together.
3882 3885
3883 3886 ``payload`` represents the raw frame payload. If it begins with
3884 3887 ``cbor:``, the following string is evaluated as Python code and the
3885 3888 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3886 3889 as a Python byte string literal.
3887 3890 """
3888 3891 opts = pycompat.byteskwargs(opts)
3889 3892
3890 3893 if opts[b'localssh'] and not repo:
3891 3894 raise error.Abort(_(b'--localssh requires a repository'))
3892 3895
3893 3896 if opts[b'peer'] and opts[b'peer'] not in (
3894 3897 b'raw',
3895 3898 b'http2',
3896 3899 b'ssh1',
3897 3900 b'ssh2',
3898 3901 ):
3899 3902 raise error.Abort(
3900 3903 _(b'invalid value for --peer'),
3901 3904 hint=_(b'valid values are "raw", "ssh1", and "ssh2"'),
3902 3905 )
3903 3906
3904 3907 if path and opts[b'localssh']:
3905 3908 raise error.Abort(_(b'cannot specify --localssh with an explicit path'))
3906 3909
3907 3910 if ui.interactive():
3908 3911 ui.write(_(b'(waiting for commands on stdin)\n'))
3909 3912
3910 3913 blocks = list(_parsewirelangblocks(ui.fin))
3911 3914
3912 3915 proc = None
3913 3916 stdin = None
3914 3917 stdout = None
3915 3918 stderr = None
3916 3919 opener = None
3917 3920
3918 3921 if opts[b'localssh']:
3919 3922 # We start the SSH server in its own process so there is process
3920 3923 # separation. This prevents a whole class of potential bugs around
3921 3924 # shared state from interfering with server operation.
3922 3925 args = procutil.hgcmd() + [
3923 3926 b'-R',
3924 3927 repo.root,
3925 3928 b'debugserve',
3926 3929 b'--sshstdio',
3927 3930 ]
3928 3931 proc = subprocess.Popen(
3929 3932 pycompat.rapply(procutil.tonativestr, args),
3930 3933 stdin=subprocess.PIPE,
3931 3934 stdout=subprocess.PIPE,
3932 3935 stderr=subprocess.PIPE,
3933 3936 bufsize=0,
3934 3937 )
3935 3938
3936 3939 stdin = proc.stdin
3937 3940 stdout = proc.stdout
3938 3941 stderr = proc.stderr
3939 3942
3940 3943 # We turn the pipes into observers so we can log I/O.
3941 3944 if ui.verbose or opts[b'peer'] == b'raw':
3942 3945 stdin = util.makeloggingfileobject(
3943 3946 ui, proc.stdin, b'i', logdata=True
3944 3947 )
3945 3948 stdout = util.makeloggingfileobject(
3946 3949 ui, proc.stdout, b'o', logdata=True
3947 3950 )
3948 3951 stderr = util.makeloggingfileobject(
3949 3952 ui, proc.stderr, b'e', logdata=True
3950 3953 )
3951 3954
3952 3955 # --localssh also implies the peer connection settings.
3953 3956
3954 3957 url = b'ssh://localserver'
3955 3958 autoreadstderr = not opts[b'noreadstderr']
3956 3959
3957 3960 if opts[b'peer'] == b'ssh1':
3958 3961 ui.write(_(b'creating ssh peer for wire protocol version 1\n'))
3959 3962 peer = sshpeer.sshv1peer(
3960 3963 ui,
3961 3964 url,
3962 3965 proc,
3963 3966 stdin,
3964 3967 stdout,
3965 3968 stderr,
3966 3969 None,
3967 3970 autoreadstderr=autoreadstderr,
3968 3971 )
3969 3972 elif opts[b'peer'] == b'ssh2':
3970 3973 ui.write(_(b'creating ssh peer for wire protocol version 2\n'))
3971 3974 peer = sshpeer.sshv2peer(
3972 3975 ui,
3973 3976 url,
3974 3977 proc,
3975 3978 stdin,
3976 3979 stdout,
3977 3980 stderr,
3978 3981 None,
3979 3982 autoreadstderr=autoreadstderr,
3980 3983 )
3981 3984 elif opts[b'peer'] == b'raw':
3982 3985 ui.write(_(b'using raw connection to peer\n'))
3983 3986 peer = None
3984 3987 else:
3985 3988 ui.write(_(b'creating ssh peer from handshake results\n'))
3986 3989 peer = sshpeer.makepeer(
3987 3990 ui,
3988 3991 url,
3989 3992 proc,
3990 3993 stdin,
3991 3994 stdout,
3992 3995 stderr,
3993 3996 autoreadstderr=autoreadstderr,
3994 3997 )
3995 3998
3996 3999 elif path:
3997 4000 # We bypass hg.peer() so we can proxy the sockets.
3998 4001 # TODO consider not doing this because we skip
3999 4002 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
4000 4003 u = util.url(path)
4001 4004 if u.scheme != b'http':
4002 4005 raise error.Abort(_(b'only http:// paths are currently supported'))
4003 4006
4004 4007 url, authinfo = u.authinfo()
4005 4008 openerargs = {
4006 4009 'useragent': b'Mercurial debugwireproto',
4007 4010 }
4008 4011
4009 4012 # Turn pipes/sockets into observers so we can log I/O.
4010 4013 if ui.verbose:
4011 4014 openerargs.update(
4012 4015 {
4013 4016 'loggingfh': ui,
4014 4017 'loggingname': b's',
4015 4018 'loggingopts': {'logdata': True, 'logdataapis': False,},
4016 4019 }
4017 4020 )
4018 4021
4019 4022 if ui.debugflag:
4020 4023 openerargs['loggingopts']['logdataapis'] = True
4021 4024
4022 4025 # Don't send default headers when in raw mode. This allows us to
4023 4026 # bypass most of the behavior of our URL handling code so we can
4024 4027 # have near complete control over what's sent on the wire.
4025 4028 if opts[b'peer'] == b'raw':
4026 4029 openerargs['sendaccept'] = False
4027 4030
4028 4031 opener = urlmod.opener(ui, authinfo, **openerargs)
4029 4032
4030 4033 if opts[b'peer'] == b'http2':
4031 4034 ui.write(_(b'creating http peer for wire protocol version 2\n'))
4032 4035 # We go through makepeer() because we need an API descriptor for
4033 4036 # the peer instance to be useful.
4034 4037 with ui.configoverride(
4035 4038 {(b'experimental', b'httppeer.advertise-v2'): True}
4036 4039 ):
4037 4040 if opts[b'nologhandshake']:
4038 4041 ui.pushbuffer()
4039 4042
4040 4043 peer = httppeer.makepeer(ui, path, opener=opener)
4041 4044
4042 4045 if opts[b'nologhandshake']:
4043 4046 ui.popbuffer()
4044 4047
4045 4048 if not isinstance(peer, httppeer.httpv2peer):
4046 4049 raise error.Abort(
4047 4050 _(
4048 4051 b'could not instantiate HTTP peer for '
4049 4052 b'wire protocol version 2'
4050 4053 ),
4051 4054 hint=_(
4052 4055 b'the server may not have the feature '
4053 4056 b'enabled or is not allowing this '
4054 4057 b'client version'
4055 4058 ),
4056 4059 )
4057 4060
4058 4061 elif opts[b'peer'] == b'raw':
4059 4062 ui.write(_(b'using raw connection to peer\n'))
4060 4063 peer = None
4061 4064 elif opts[b'peer']:
4062 4065 raise error.Abort(
4063 4066 _(b'--peer %s not supported with HTTP peers') % opts[b'peer']
4064 4067 )
4065 4068 else:
4066 4069 peer = httppeer.makepeer(ui, path, opener=opener)
4067 4070
4068 4071 # We /could/ populate stdin/stdout with sock.makefile()...
4069 4072 else:
4070 4073 raise error.Abort(_(b'unsupported connection configuration'))
4071 4074
4072 4075 batchedcommands = None
4073 4076
4074 4077 # Now perform actions based on the parsed wire language instructions.
4075 4078 for action, lines in blocks:
4076 4079 if action in (b'raw', b'raw+'):
4077 4080 if not stdin:
4078 4081 raise error.Abort(_(b'cannot call raw/raw+ on this peer'))
4079 4082
4080 4083 # Concatenate the data together.
4081 4084 data = b''.join(l.lstrip() for l in lines)
4082 4085 data = stringutil.unescapestr(data)
4083 4086 stdin.write(data)
4084 4087
4085 4088 if action == b'raw+':
4086 4089 stdin.flush()
4087 4090 elif action == b'flush':
4088 4091 if not stdin:
4089 4092 raise error.Abort(_(b'cannot call flush on this peer'))
4090 4093 stdin.flush()
4091 4094 elif action.startswith(b'command'):
4092 4095 if not peer:
4093 4096 raise error.Abort(
4094 4097 _(
4095 4098 b'cannot send commands unless peer instance '
4096 4099 b'is available'
4097 4100 )
4098 4101 )
4099 4102
4100 4103 command = action.split(b' ', 1)[1]
4101 4104
4102 4105 args = {}
4103 4106 for line in lines:
4104 4107 # We need to allow empty values.
4105 4108 fields = line.lstrip().split(b' ', 1)
4106 4109 if len(fields) == 1:
4107 4110 key = fields[0]
4108 4111 value = b''
4109 4112 else:
4110 4113 key, value = fields
4111 4114
4112 4115 if value.startswith(b'eval:'):
4113 4116 value = stringutil.evalpythonliteral(value[5:])
4114 4117 else:
4115 4118 value = stringutil.unescapestr(value)
4116 4119
4117 4120 args[key] = value
4118 4121
4119 4122 if batchedcommands is not None:
4120 4123 batchedcommands.append((command, args))
4121 4124 continue
4122 4125
4123 4126 ui.status(_(b'sending %s command\n') % command)
4124 4127
4125 4128 if b'PUSHFILE' in args:
4126 4129 with open(args[b'PUSHFILE'], 'rb') as fh:
4127 4130 del args[b'PUSHFILE']
4128 4131 res, output = peer._callpush(
4129 4132 command, fh, **pycompat.strkwargs(args)
4130 4133 )
4131 4134 ui.status(_(b'result: %s\n') % stringutil.escapestr(res))
4132 4135 ui.status(
4133 4136 _(b'remote output: %s\n') % stringutil.escapestr(output)
4134 4137 )
4135 4138 else:
4136 4139 with peer.commandexecutor() as e:
4137 4140 res = e.callcommand(command, args).result()
4138 4141
4139 4142 if isinstance(res, wireprotov2peer.commandresponse):
4140 4143 val = res.objects()
4141 4144 ui.status(
4142 4145 _(b'response: %s\n')
4143 4146 % stringutil.pprint(val, bprefix=True, indent=2)
4144 4147 )
4145 4148 else:
4146 4149 ui.status(
4147 4150 _(b'response: %s\n')
4148 4151 % stringutil.pprint(res, bprefix=True, indent=2)
4149 4152 )
4150 4153
4151 4154 elif action == b'batchbegin':
4152 4155 if batchedcommands is not None:
4153 4156 raise error.Abort(_(b'nested batchbegin not allowed'))
4154 4157
4155 4158 batchedcommands = []
4156 4159 elif action == b'batchsubmit':
4157 4160 # There is a batching API we could go through. But it would be
4158 4161 # difficult to normalize requests into function calls. It is easier
4159 4162 # to bypass this layer and normalize to commands + args.
4160 4163 ui.status(
4161 4164 _(b'sending batch with %d sub-commands\n')
4162 4165 % len(batchedcommands)
4163 4166 )
4164 4167 assert peer is not None
4165 4168 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
4166 4169 ui.status(
4167 4170 _(b'response #%d: %s\n') % (i, stringutil.escapestr(chunk))
4168 4171 )
4169 4172
4170 4173 batchedcommands = None
4171 4174
4172 4175 elif action.startswith(b'httprequest '):
4173 4176 if not opener:
4174 4177 raise error.Abort(
4175 4178 _(b'cannot use httprequest without an HTTP peer')
4176 4179 )
4177 4180
4178 4181 request = action.split(b' ', 2)
4179 4182 if len(request) != 3:
4180 4183 raise error.Abort(
4181 4184 _(
4182 4185 b'invalid httprequest: expected format is '
4183 4186 b'"httprequest <method> <path>'
4184 4187 )
4185 4188 )
4186 4189
4187 4190 method, httppath = request[1:]
4188 4191 headers = {}
4189 4192 body = None
4190 4193 frames = []
4191 4194 for line in lines:
4192 4195 line = line.lstrip()
4193 4196 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
4194 4197 if m:
4195 4198 # Headers need to use native strings.
4196 4199 key = pycompat.strurl(m.group(1))
4197 4200 value = pycompat.strurl(m.group(2))
4198 4201 headers[key] = value
4199 4202 continue
4200 4203
4201 4204 if line.startswith(b'BODYFILE '):
4202 4205 with open(line.split(b' ', 1), b'rb') as fh:
4203 4206 body = fh.read()
4204 4207 elif line.startswith(b'frame '):
4205 4208 frame = wireprotoframing.makeframefromhumanstring(
4206 4209 line[len(b'frame ') :]
4207 4210 )
4208 4211
4209 4212 frames.append(frame)
4210 4213 else:
4211 4214 raise error.Abort(
4212 4215 _(b'unknown argument to httprequest: %s') % line
4213 4216 )
4214 4217
4215 4218 url = path + httppath
4216 4219
4217 4220 if frames:
4218 4221 body = b''.join(bytes(f) for f in frames)
4219 4222
4220 4223 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
4221 4224
4222 4225 # urllib.Request insists on using has_data() as a proxy for
4223 4226 # determining the request method. Override that to use our
4224 4227 # explicitly requested method.
4225 4228 req.get_method = lambda: pycompat.sysstr(method)
4226 4229
4227 4230 try:
4228 4231 res = opener.open(req)
4229 4232 body = res.read()
4230 4233 except util.urlerr.urlerror as e:
4231 4234 # read() method must be called, but only exists in Python 2
4232 4235 getattr(e, 'read', lambda: None)()
4233 4236 continue
4234 4237
4235 4238 ct = res.headers.get('Content-Type')
4236 4239 if ct == 'application/mercurial-cbor':
4237 4240 ui.write(
4238 4241 _(b'cbor> %s\n')
4239 4242 % stringutil.pprint(
4240 4243 cborutil.decodeall(body), bprefix=True, indent=2
4241 4244 )
4242 4245 )
4243 4246
4244 4247 elif action == b'close':
4245 4248 assert peer is not None
4246 4249 peer.close()
4247 4250 elif action == b'readavailable':
4248 4251 if not stdout or not stderr:
4249 4252 raise error.Abort(
4250 4253 _(b'readavailable not available on this peer')
4251 4254 )
4252 4255
4253 4256 stdin.close()
4254 4257 stdout.read()
4255 4258 stderr.read()
4256 4259
4257 4260 elif action == b'readline':
4258 4261 if not stdout:
4259 4262 raise error.Abort(_(b'readline not available on this peer'))
4260 4263 stdout.readline()
4261 4264 elif action == b'ereadline':
4262 4265 if not stderr:
4263 4266 raise error.Abort(_(b'ereadline not available on this peer'))
4264 4267 stderr.readline()
4265 4268 elif action.startswith(b'read '):
4266 4269 count = int(action.split(b' ', 1)[1])
4267 4270 if not stdout:
4268 4271 raise error.Abort(_(b'read not available on this peer'))
4269 4272 stdout.read(count)
4270 4273 elif action.startswith(b'eread '):
4271 4274 count = int(action.split(b' ', 1)[1])
4272 4275 if not stderr:
4273 4276 raise error.Abort(_(b'eread not available on this peer'))
4274 4277 stderr.read(count)
4275 4278 else:
4276 4279 raise error.Abort(_(b'unknown action: %s') % action)
4277 4280
4278 4281 if batchedcommands is not None:
4279 4282 raise error.Abort(_(b'unclosed "batchbegin" request'))
4280 4283
4281 4284 if peer:
4282 4285 peer.close()
4283 4286
4284 4287 if proc:
4285 4288 proc.kill()
General Comments 0
You need to be logged in to leave comments. Login now